{"architecture": "standard", "d_in": 2048, "d_sae": 16384, "dtype": "float32", "device": "cuda", "model_name": "gemma-2b", "hook_name": "blocks.17.hook_resid_post", "hook_layer": 17, "hook_head_index": null, "activation_fn_str": "relu", "activation_fn_kwargs": {}, "apply_b_dec_to_input": false, "finetuning_scaling_factor": false, "sae_lens_training_version": "3.11.0", "prepend_bos": true, "dataset_path": "ctigges/openwebtext-gemma-1024-cl", "dataset_trust_remote_code": true, "context_size": 1024, "normalize_activations": "none"}