{ "architectures": [ "MistralForCausalLM" ], "max_position_embeddings": 2048, "hidden_size": 4096, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50257, "model_type": "mistral", # <- Important key! "use_cache": true, "n_head": 32, "n_layer": 32, "n_positions": 2048, "initializer_range": 0.02, "layer_norm_epsilon": 1e-5, "attention_probs_dropout_prob": 0.1 }