Init cpm-bee-1b
- config.json +15 -0
- pytorch_model.bin +3 -0
- vocab.txt +0 -0
config.json
ADDED
@@ -0,0 +1,15 @@
+{
+    "vocab_size": 86583,
+    "dim_model": 4096,
+    "dim_ff" : 1024,
+    "num_layers" : 48,
+    "num_heads": 32,
+    "dim_head" : 40,
+    "dropout_p" : 0.0,
+    "position_bias_num_buckets" : 256,
+    "position_bias_num_segment_buckets": 256,
+    "position_bias_max_distance" : 2048,
+    "eps" : 1e-6,
+    "half" : true,
+    "mask_modules": [[false, false], [true, false], [false, false], [true, false], [true, true], [true, false], [true, true], [true, true], [false, false], [false, false], [true, true], [true, false], [true, false], [true, true], [false, false], [true, true], [false, false], [false, true], [true, false], [true, true], [false, false], [false, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [false, false], [true, true], [true, false], [true, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [false, false]]
+}
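The config above describes the cpm-bee-1b architecture: 48 layers, dim_model 4096, 32 heads of dim_head 40, with a per-layer mask_modules list that appears to flag which sub-blocks are active in each layer. A minimal sanity-check sketch in Python, assuming the file is read locally as plain JSON and that each mask_modules entry is an [attention, feed-forward] flag pair for one layer (an assumption, not documented in this commit):

import json

# Sketch: load the config added above and run a couple of consistency checks.
# Assumption: each mask_modules entry is an [attention, ffn] flag pair, one per layer.
with open("config.json") as f:
    cfg = json.load(f)

# One flag pair per transformer layer.
assert len(cfg["mask_modules"]) == cfg["num_layers"] == 48

# Attention inner width implied by the config (dim_head is explicit,
# not derived as dim_model / num_heads).
attn_inner_dim = cfg["num_heads"] * cfg["dim_head"]  # 32 * 40 = 1280
print(f"vocab={cfg['vocab_size']}, d_model={cfg['dim_model']}, attn_inner={attn_inner_dim}")
print("fp16 weights expected" if cfg["half"] else "fp32 weights expected")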
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a4900eb67843181959ea77043a59a7d7e78fb688bd9ee61858658c10823c1b33
+size 2110668093
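The pytorch_model.bin entry is a Git LFS pointer rather than the weights themselves: the oid is the SHA-256 of the real file and size is its byte count (about 2.1 GB). A small sketch, assuming the actual pytorch_model.bin has already been downloaded next to the script, that checks the local file against the pointer:

import hashlib

# Sketch: verify a downloaded pytorch_model.bin against the Git LFS pointer above.
EXPECTED_SHA256 = "a4900eb67843181959ea77043a59a7d7e78fb688bd9ee61858658c10823c1b33"
EXPECTED_SIZE = 2110668093  # bytes, from the "size" line of the pointer

h = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")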
vocab.txt
ADDED
The diff for this file is too large to render. See the raw diff.