Update config.json
config.json  (+11, -1)
@@ -38,7 +38,17 @@
     "true_sequential": true,
     "model_name_or_path": null,
     "model_file_base_name": "model",
-    "inside_layer_modules" : [
+    "inside_layer_modules" : [
+      ["self_attn.k_proj", "self_attn.v_proj", "self_attn.q_proj"],
+      ["self_attn.o_proj"],
+      ["block_sparse_moe.experts.0.w1", "block_sparse_moe.experts.0.w2", "block_sparse_moe.experts.0.w3"],
+      ["block_sparse_moe.experts.1.w1", "block_sparse_moe.experts.1.w2", "block_sparse_moe.experts.1.w3"],
+      ["block_sparse_moe.experts.2.w1", "block_sparse_moe.experts.2.w2", "block_sparse_moe.experts.2.w3"],
+      ["block_sparse_moe.experts.3.w1", "block_sparse_moe.experts.3.w2", "block_sparse_moe.experts.3.w3"],
+      ["block_sparse_moe.experts.4.w1", "block_sparse_moe.experts.4.w2", "block_sparse_moe.experts.4.w3"],
+      ["block_sparse_moe.experts.5.w1", "block_sparse_moe.experts.5.w2", "block_sparse_moe.experts.5.w3"],
+      ["block_sparse_moe.experts.6.w1", "block_sparse_moe.experts.6.w2", "block_sparse_moe.experts.6.w3"],
+      ["block_sparse_moe.experts.7.w1", "block_sparse_moe.experts.7.w2", "block_sparse_moe.experts.7.w3"]],
     "quant_method": "gptq"
   }
 }
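For anyone consuming this file: inside_layer_modules is the AutoGPTQ-style ordered list of per-layer module groups to quantize (attention projections, then o_proj, then the eight Mixtral block_sparse_moe experts). Below is a minimal, stdlib-only sketch for sanity-checking the updated file; the local path and the fallback lookup under a "quantization_config" key are assumptions about how the file is stored, not part of this commit.

    import json

    # Hypothetical local path; point this at your downloaded copy of the file.
    CONFIG_PATH = "config.json"

    with open(CONFIG_PATH) as f:
        cfg = json.load(f)

    # The GPTQ settings may sit at the top level or nested under "quantization_config",
    # depending on the repo layout; handle both cases.
    qcfg = cfg.get("quantization_config", cfg)
    assert qcfg.get("quant_method") == "gptq"

    groups = qcfg["inside_layer_modules"]
    # Expected shape after this change: 2 attention groups + 8 expert groups.
    assert len(groups) == 10
    for group in groups:
        for name in group:
            # Every entry should be a non-empty relative module path string.
            assert isinstance(name, str) and name

    print(f"{sum(len(g) for g in groups)} inside-layer modules across {len(groups)} groups")

Run against the updated config this should report 28 modules across 10 groups; the incomplete pre-change value (an unterminated "inside_layer_modules" : [) would fail to parse at json.load.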