{ "d_model": 4096, "ssm_cfg": { "expand": 1 }, "rms_norm_eps": 1e-05, "vocab_size": null, "d_xb": 1024, "intermediate_size": 14336, "hidden_act": "silu", "n_layer": 32, "attn_layers": [ 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31 ] }