sedrickkeh committed
Commit 16906fb
Parent(s): d3f3987
Update config.json

Files changed: config.json (+2 -2)
config.json CHANGED
@@ -6,11 +6,11 @@
     "params": null,
     "apply_qk_norm": true,
     "attn_activation": null,
-    "attn_name": "
+    "attn_name": "torch_attn",
     "attn_seq_scalar": null,
     "attn_seq_scalar_alpha": null,
     "dim": 4096,
-    "ffn_type": "
+    "ffn_type": "swiglu_torch",
     "model": "open_lm_7b",
     "model_norm": "gain_only_lp_layer_norm",
     "moe_capacity_factor": 1.25,
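
The two edited fields choose which attention and feed-forward implementations the model code instantiates; judging by the names, "torch_attn" and "swiglu_torch" select PyTorch-native backends. As a minimal sketch (assuming the keys sit at the top level of config.json, as the hunk suggests), the updated values can be verified after pulling the commit:

import json

# Minimal sketch: confirm config.json now selects the "torch_attn"
# attention and "swiglu_torch" feed-forward backends. Assumes the keys
# sit at the top level of the file, as the diff hunk suggests.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["attn_name"] == "torch_attn"
assert cfg["ffn_type"] == "swiglu_torch"

# Sanity print of neighboring fields from the same hunk.
print(cfg["model"], cfg["dim"], cfg["model_norm"])
# expected: open_lm_7b 4096 gain_only_lp_layer_norm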