ybelkada commited on
Commit
2ab4b46
1 Parent(s): f21202d

Update config.json

Browse files

This should fix the issue reported in https://huggingface.co/facebook/galactica-30b/discussions/4, in combination with the PR https://github.com/huggingface/transformers/pull/20390

Files changed (1) hide show
  1. config.json +3 -1
config.json CHANGED
@@ -26,5 +26,7 @@
26
  "transformers_version": "4.21.0.dev0",
27
  "use_cache": true,
28
  "vocab_size": 50000,
29
- "word_embed_proj_dim": 7168
 
 
30
  }
 
26
  "transformers_version": "4.21.0.dev0",
27
  "use_cache": true,
28
  "vocab_size": 50000,
29
+ "word_embed_proj_dim": 7168,
30
+ "enable_bias": false,
31
+ "layer_norm_elementwise_affine": false
32
  }