{ "dim": 1024, "ffn_dim_multiplier": null, "flash": true, "max_batch_size": 8, "max_seq_len": 1024, "multiple_of": 512, "n_heads": 8, "n_kv_heads": 4, "n_layers": 8, "norm_eps": 1e-05, "rope_theta": 500000, "use_scaled_rope": false, "vocab_size": 32001 }