File size: 291 Bytes
{
  "bits": 4,
  "group_size": 128,
  "damp_percent": 0.01,
  "desc_act": false,
  "static_groups": false,
  "sym": true,
  "true_sequential": true,
  "model_name_or_path": "./tmp-gptq",
  "model_file_base_name": "gptq_model-4bit-128g",
  "is_marlin_format": true,
  "quant_method": "gptq"
}
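As a quick sanity check, the config can be read back with Python's standard `json` module and the key knobs inspected before loading the model. This is a minimal sketch, assuming the blob above is saved as `quantize_config.json` next to the weights (the filename and path are assumptions, not something stated above):

```python
import json

# Minimal sketch: load the GPTQ quantization config shown above and
# print the settings that matter when picking a runtime kernel.
# "quantize_config.json" is an assumed filename for this blob.
with open("quantize_config.json") as f:
    cfg = json.load(f)

# Basic expectations based on the file contents above.
assert cfg["quant_method"] == "gptq"
assert cfg["bits"] == 4 and cfg["group_size"] == 128

print(
    f"{cfg['bits']}-bit weights, group size {cfg['group_size']}, "
    f"symmetric={cfg['sym']}, desc_act={cfg['desc_act']}, "
    f"marlin_format={cfg['is_marlin_format']}"
)
```

The `is_marlin_format: true` flag indicates the checkpoint `gptq_model-4bit-128g` has been repacked for the Marlin kernel layout rather than the standard GPTQ packing, so a loader that supports Marlin should be used with it.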