{"architecture": "standard", "d_in": 512, "d_sae": 8192, "dtype": "float32", "device": "cuda", "model_name": "tiny-stories-1L-21M", "hook_name": "blocks.0.hook_mlp_out", "hook_layer": 0, "hook_head_index": null, "activation_fn_str": "relu", "activation_fn_kwargs": {}, "apply_b_dec_to_input": false, "finetuning_scaling_factor": false, "sae_lens_training_version": "3.20.3", "prepend_bos": true, "dataset_path": "apollo-research/roneneldan-TinyStories-tokenizer-gpt2", "dataset_trust_remote_code": true, "context_size": 128, "normalize_activations": "none", "neuronpedia_id": null, "model_from_pretrained_kwargs": {"center_writing_weights": false}, "l1_coefficient": 5, "lp_norm": 1.0, "use_ghost_grads": false, "normalize_sae_decoder": false, "noise_scale": 0.0, "decoder_orthogonal_init": false, "init_encoder_as_decoder_transpose": true, "mse_loss_normalization": null, "decoder_heuristic_init": true, "scale_sparsity_penalty_by_decoder_norm": false}