{
  "one_external_file": true,
  "opset": null,
  "optimization": {
    "disable_attention": null,
    "disable_attention_fusion": false,
    "disable_bias_gelu": null,
    "disable_bias_gelu_fusion": false,
    "disable_bias_skip_layer_norm": null,
    "disable_bias_skip_layer_norm_fusion": false,
    "disable_embed_layer_norm": true,
    "disable_embed_layer_norm_fusion": true,
    "disable_gelu": null,
    "disable_gelu_fusion": false,
    "disable_group_norm_fusion": true,
    "disable_layer_norm": null,
    "disable_layer_norm_fusion": false,
    "disable_packed_kv": true,
    "disable_rotary_embeddings": false,
    "disable_shape_inference": true,
    "disable_skip_layer_norm": null,
    "disable_skip_layer_norm_fusion": false,
    "enable_gelu_approximation": false,
    "enable_gemm_fast_gelu_fusion": false,
    "enable_transformers_specific_optimizations": false,
    "fp16": false,
    "no_attention_mask": false,
    "optimization_level": 1,
    "optimize_for_gpu": false,
    "optimize_with_onnxruntime_only": null,
    "use_mask_index": false,
    "use_multi_head_attention": false,
    "use_raw_attention_mask": false
  },
  "optimum_version": "1.16.2",
  "quantization": {},
  "transformers_version": "4.37.0",
  "use_external_data_format": false
}
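
This file has the shape of the `ort_config.json` that Hugging Face Optimum writes alongside an optimized ONNX model: basic ONNX Runtime graph optimizations (`optimization_level: 1`), CPU target (`optimize_for_gpu: false`), no fp16, and transformers-specific fusions disabled. The sketch below shows how such a config is typically produced with Optimum's `ORTOptimizer`; it is an illustrative assumption, not taken from this repo. The model id `distilbert-base-uncased` and the `save_dir` are placeholders.

```python
# Minimal sketch: producing an optimized ONNX model whose ort_config.json
# mirrors the settings shown above. Assumes optimum[onnxruntime] is installed;
# the model id below is a placeholder, not the model behind this file.
from optimum.onnxruntime import ORTModelForSequenceClassification, ORTOptimizer
from optimum.onnxruntime.configuration import OptimizationConfig

# Export a Transformers checkpoint to ONNX.
model = ORTModelForSequenceClassification.from_pretrained(
    "distilbert-base-uncased", export=True
)

optimizer = ORTOptimizer.from_pretrained(model)

# Mirror the key fields from the JSON: level-1 graph optimizations, CPU
# target, no fp16, and no transformers-specific fusion passes.
optimization_config = OptimizationConfig(
    optimization_level=1,
    optimize_for_gpu=False,
    fp16=False,
    enable_transformers_specific_optimizations=False,
)

# Writes the optimized model plus an ort_config.json like the one above
# (the empty "quantization" section means no quantization was applied).
optimizer.optimize(save_dir="optimized_model", optimization_config=optimization_config)
```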