{
    "dim": 4096,
    "n_layers": 32,
    "head_dim": 128,
    "hidden_dim": 14336,
    "n_heads": 32,
    "n_kv_heads": 8,
    "rope_theta": 1000000.0,
    "norm_eps": 1e-05,
    "vocab_size": 32000,
    "quantization": {
        "group_size": 64,
        "bits": 4
    },
    "model_type": "mistral"
}
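
For reference, here is a minimal sketch of reading these fields in Python and deriving a few properties of the model; it assumes the file above is saved as config.json next to the weights (the filename and path are assumptions, not taken from the snippet).

import json

# Load the quantized model's configuration file.
with open("config.json") as f:
    config = json.load(f)

# Grouped-query attention: 32 query heads share 8 key/value heads,
# i.e. 4 query heads per KV head.
queries_per_kv_head = config["n_heads"] // config["n_kv_heads"]

# Each attention head is 128-dimensional, which matches the 4096-wide hidden state.
assert config["n_heads"] * config["head_dim"] == config["dim"]

# 4-bit quantization with one scale/bias pair per group of 64 weights.
print(config["quantization"])                  # {'group_size': 64, 'bits': 4}
print(f"{queries_per_kv_head} query heads per KV head")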