phi-2 / quantize_config.json
{
"per_channel": false,
"reduce_range": false,
"quantize_mode": "q8",
"per_model_config": {
"decoder_model": {
"op_types": [
"Where",
"Gather",
"LayerNormalization",
"Less",
"Unsqueeze",
"Reshape",
"Squeeze",
"Range",
"Constant",
"Tanh",
"Shape",
"Transpose",
"Sub",
"Concat",
"ConstantOfShape",
"MatMul",
"Neg",
"Slice",
"Add",
"Expand",
"Softmax",
"Div",
"Cast",
"Mul",
"Pow",
"Equal"
],
"weight_type": "QInt8"
},
"decoder_model_merged": {
"op_types": [
"Where",
"Gather",
"LayerNormalization",
"Less",
"Unsqueeze",
"Reshape",
"Squeeze",
"Range",
"Constant",
"Tanh",
"Shape",
"Transpose",
"If",
"Sub",
"Concat",
"ConstantOfShape",
"MatMul",
"Neg",
"Slice",
"Add",
"Expand",
"Softmax",
"Div",
"Cast",
"Mul",
"Pow",
"Equal"
],
"weight_type": "QInt8"
},
"decoder_with_past_model": {
"op_types": [
"Where",
"Gather",
"LayerNormalization",
"Unsqueeze",
"Reshape",
"Range",
"Constant",
"Tanh",
"Shape",
"Transpose",
"Sub",
"Concat",
"ConstantOfShape",
"MatMul",
"Neg",
"Slice",
"Add",
"Expand",
"Softmax",
"Div",
"Cast",
"Mul",
"Pow",
"Equal"
],
"weight_type": "QInt8"
}
}
}
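This file follows the per-model quantization config format used by ONNX export/quantization scripts: the top-level flags and each entry's op_types/weight_type line up with the arguments of onnxruntime's dynamic quantization API. Below is a minimal sketch, assuming the decoder ONNX files sit next to this config and are named after the per_model_config keys; it is an illustration of how such a config could drive onnxruntime.quantization.quantize_dynamic, not the uploader's actual export script.

```python
import json
from onnxruntime.quantization import QuantType, quantize_dynamic

# Load the quantization settings shipped with the model repo.
with open("quantize_config.json") as f:
    config = json.load(f)

for model_name, model_config in config["per_model_config"].items():
    quantize_dynamic(
        model_input=f"{model_name}.onnx",               # assumed file layout
        model_output=f"{model_name}_quantized.onnx",    # assumed output name
        per_channel=config["per_channel"],              # false in this config
        reduce_range=config["reduce_range"],            # false in this config
        weight_type=QuantType.QInt8,                    # "weight_type": "QInt8"
        op_types_to_quantize=model_config["op_types"],  # per-model operator list
    )
```

The decoder_model_merged entry additionally lists the If operator because the merged decoder wraps the with-past and without-past branches in a subgraph; the other two entries cover the same operator set minus the control-flow nodes.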