LlamaGuard-7B-GPTQ / consolidate_params.json
{
    "dtype": "fp16",
    "manifold_bucket": "genai_llm_fb",
    "model_parallel_size": 1,
    "on_gpu": true,
    "src": "checkpoints/mast/inan/2023-11-27/080608_VAx9Hcb0THuGhWcZP4I6OA/step_500",
    "tgt": "checkpoints/mast/inan/2023-11-27/080608_VAx9Hcb0THuGhWcZP4I6OA_step_500_consolidated_mp1",
    "tokenizer_dir": "/data/metaformers/local_cache/tokenizer",
    "tokenizer_name": "tokenizer_final_32k.minus_inf_ws.model"
}
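
The JSON above appears to record the checkpoint-consolidation settings for the upstream Llama Guard weights (fp16, model parallel size 1); the "src"/"tgt" checkpoint paths, Manifold bucket, and tokenizer directory refer to Meta-internal storage and are not resolvable elsewhere. As a minimal, hypothetical sketch, the file can be inspected like any other JSON config, assuming it has been downloaded locally as consolidate_params.json:

```python
# Sketch only (not part of this repo): load and inspect the
# consolidation parameters. The referenced checkpoint and tokenizer
# paths are internal and cannot be opened outside that environment.
import json

with open("consolidate_params.json") as f:
    params = json.load(f)

print(params["dtype"])                # "fp16"
print(params["model_parallel_size"])  # 1 -> consolidated to a single shard
print(params["tokenizer_name"])       # tokenizer file name recorded at export time
```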