new-nlp-hw3-llama2 / config.json
{
  "architectures": [
    "MyLLaMa"
  ],
  "auto_map": {
    "AutoConfig": "configure_for_hf.MyLLaMaConfig",
    "AutoModelForCausalLM": "configure_for_hf.MyLLaMa"
  },
  "embed_dim": 1536,
  "model_type": "LLaMa",
  "n_chckpnt_segments": 24,
  "n_heads": 24,
  "n_layers": 24,
  "torch_dtype": "float32",
  "transformers_version": "4.47.0.dev0"
}
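
The "auto_map" block registers the custom classes for the transformers Auto API: AutoConfig resolves to MyLLaMaConfig and AutoModelForCausalLM resolves to MyLLaMa, both defined in configure_for_hf.py inside this repo. Loading therefore requires trust_remote_code=True so transformers is allowed to import that file. A minimal loading sketch follows, assuming the repo id is Mortie1/new-nlp-hw3-llama2 (inferred from the file path; verify before use):

# Sketch: load the custom MyLLaMa model through the auto_map above.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "Mortie1/new-nlp-hw3-llama2"  # assumption inferred from the file path

# trust_remote_code=True lets transformers import MyLLaMaConfig and MyLLaMa
# from configure_for_hf.py in the repo, as declared in "auto_map".
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# Custom fields from this config are exposed as attributes.
print(config.embed_dim, config.n_heads, config.n_layers)  # 1536 24 24

Note that "model_type": "LLaMa" names a custom architecture registered by the repo's own code; it is distinct from the built-in "llama" model type, which is why the auto_map indirection is needed at all.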