{"seed": 49, "batch_size": 1024, "buffer_mult": 384, "lr": 5e-05, "num_tokens": 1000000000, "l1_coeff": 0.00013478870096150786, "beta1": 0.9, "beta2": 0.99, "dict_mult": 8, "seq_len": 128, "enc_dtype": "fp32", "remove_rare_dir": false, "model_name": "gpt2-small", "site": "mlp_out", "layer": 3, "device": "cuda", "model_batch_size": 128, "buffer_size": 393216, "buffer_batches": 3072, "act_name": "blocks.3.hook_mlp_out", "act_size": 768, "dict_size": 6144, "name": "gpt2-small_3_6144_mlp_out"}