{"seed": 49, "batch_size": 4096, "buffer_mult": 384, "lr": 0.0001, "num_tokens": 2000000000, "l1_coeff": 0.0003, "beta1": 0.9, "beta2": 0.99, "dict_mult": 1, "seq_len": 128, "enc_dtype": "fp32", "remove_rare_dir": false, "model_name": "gpt-1l", "site": "post", "layer": 0, "device": "cuda:0", "model_batch_size": 512, "buffer_size": 1572864, "buffer_batches": 12288, "act_name": "blocks.0.mlp.hook_post", "act_size": 512, "dict_size": 512, "name": "gpt-1l_0_512_post"}