pixtral-12b-vision / config.json
{
  "architectures": [
    "PixtralVisionModel"
  ],
  "attention_dropout": 0.0,
  "head_dim": 64,
  "hidden_act": "gelu",
  "hidden_size": 1024,
  "image_size": 1024,
  "intermediate_size": 4096,
  "is_composition": true,
  "model_type": "pixtral",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_hidden_layers": 24,
  "patch_size": 16,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.46.0.dev0"
}
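
A minimal sketch of rebuilding the vision tower from this config with the transformers library. It assumes the file is saved locally as config.json and that a transformers version with Pixtral support is installed (the file records 4.46.0.dev0); the asserts only restate arithmetic implied by the values above.

# Sketch: instantiate PixtralVisionModel from this config.json
# (assumes transformers >= 4.46 with Pixtral support installed).
from transformers import PixtralVisionConfig, PixtralVisionModel

# Rebuild the config object from the JSON file above.
config = PixtralVisionConfig.from_json_file("config.json")

# Consistency checks implied by the values in the file:
# head_dim 64 == hidden_size 1024 / num_attention_heads 16,
# and a 1024px image at patch_size 16 gives a 64 x 64 patch grid.
assert config.hidden_size // config.num_attention_heads == config.head_dim
assert (config.image_size // config.patch_size) ** 2 == 64 * 64

# Random-weight instantiation; use PixtralVisionModel.from_pretrained(...)
# instead to load the uploaded weights from the repo.
model = PixtralVisionModel(config)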