{"seed": 49, "batch_size": 1024, "buffer_mult": 384, "lr": 5e-05, "num_tokens": 1000000000, "l1_coeff": 3.688410215545445e-05, "beta1": 0.9, "beta2": 0.99, "dict_mult": 8, "seq_len": 128, "enc_dtype": "fp32", "remove_rare_dir": false, "model_name": "gpt2-small", "site": "mlp_out", "layer": 2, "device": "cuda", "model_batch_size": 128, "buffer_size": 393216, "buffer_batches": 3072, "act_name": "blocks.2.hook_mlp_out", "act_size": 768, "dict_size": 6144, "name": "gpt2-small_2_6144_mlp_out"} |