{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "torch_dtype": "float16",
  "prompt_template": {
    "type": "alpaca",
    "description": "Alpaca instruction-following format"
  }
}