GPTQ quantization config (319 Bytes, commit 8c8cf87):
{
  "bits": 4,
  "group_size": 128,
  "damp_percent": 0.01,
  "desc_act": false,
  "static_groups": false,
  "sym": true,
  "true_sequential": true,
  "model_name_or_path": "/app/Mistral/AutoGPTQ-Mistral-Model",
  "model_file_base_name": "gptq_model-4bit-128g",
  "quant_method": "gptq",
  "checkpoint_format": "gptq"
}
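
The fields above follow the quantize config layout that AutoGPTQ writes next to a GPTQ checkpoint: 4-bit symmetric quantization ("sym": true) with one scale per group of 128 weight columns ("group_size": 128), activation-order reordering disabled ("desc_act": false), and 1% Hessian dampening for numerical stability ("damp_percent": 0.01). A minimal sketch of loading such a checkpoint with the AutoGPTQ Python API follows; the local path, the weight basename, and the safetensors format are assumptions taken from the fields above, not from a published model ID.

# A minimal sketch, assuming the auto_gptq and transformers packages are
# installed and a CUDA device is available.
from auto_gptq import AutoGPTQForCausalLM
from transformers import AutoTokenizer

model_dir = "/app/Mistral/AutoGPTQ-Mistral-Model"  # from "model_name_or_path"

# from_quantized() reads the quantize config from model_dir and rebuilds
# the 4-bit, group-size-128 GPTQ layers it describes.
model = AutoGPTQForCausalLM.from_quantized(
    model_dir,
    model_basename="gptq_model-4bit-128g",  # matches "model_file_base_name"
    use_safetensors=True,                   # assumption about the weight format
    device="cuda:0",
)
tokenizer = AutoTokenizer.from_pretrained(model_dir)

# Quick smoke test: generate a few tokens from the quantized model.
inputs = tokenizer("Hello", return_tensors="pt").to("cuda:0")
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=16)[0]))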