Commit 5e6584e by codebyzeb (parent: 2f27f0e)

Training in progress, step 400000
babyslm/syntactic.txt CHANGED
The diff for this file is too large to render. See raw diff
 
blimp_results.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cc48472f3cf705c5dd227b5fcfa7b1a7cc6c86e80a3c93113e48f450de38f4ee
- size 80216243
+ oid sha256:c3f65138b25730d1a54c3acb47434402e99eebbeeb7b7e2ab62d762ed4c7d66d
+ size 80227472
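
The binary files in this commit are tracked with Git LFS, so each diff only shows the pointer file: a spec version line, the object's sha256 oid, and its size in bytes. A minimal sketch of checking a downloaded file against the new pointer values above; the local filename and the helper function are illustrative, not part of this repository:

import hashlib

def verify_lfs_object(path, expected_oid, expected_size):
    """Check a downloaded file against the oid/size from its LFS pointer."""
    sha = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == expected_oid and size == expected_size

# Values taken from the new blimp_results.json pointer above.
ok = verify_lfs_object(
    "blimp_results.json",
    "c3f65138b25730d1a54c3acb47434402e99eebbeeb7b7e2ab62d762ed4c7d66d",
    80227472,
)
print("blimp_results.json matches its LFS pointer:", ok)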
config.json CHANGED
@@ -3,9 +3,9 @@
  "architectures": [
    "GPT2LMHeadModel"
  ],
- "attn_pdrop": 0.3,
+ "attn_pdrop": 0.1,
  "bos_token_id": 0,
- "embd_pdrop": 0.3,
+ "embd_pdrop": 0.1,
  "eos_token_id": 0,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
@@ -16,7 +16,7 @@
  "n_layer": 6,
  "n_positions": 256,
  "reorder_and_upcast_attn": false,
- "resid_pdrop": 0.3,
+ "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6dde8a13f79af091982f905bc438434c5cbbd412885e74c595cc8a9474c06228
+ oid sha256:f834b2b63234d08a7e518d0f313ccb8df4b26fadebff9b783e3c3e45031801a8
  size 108961160
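
The weight checkpoint's oid changes while its size stays at 108961160 bytes, as expected for updated weights with unchanged tensor shapes. A minimal sketch of inspecting the new checkpoint with the safetensors library, assuming a local download of model.safetensors:

from safetensors.torch import load_file

# Load the updated weights; the path assumes a local copy of model.safetensors.
state_dict = load_file("model.safetensors")
total = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total} parameters")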
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e0b5a8b6f1890201775a9dff957294eb0b004f3daffec4d13e5aebb84bcac43e
+ oid sha256:5dc80136f7f23f477ebdd661d182228c4de21c404425ed1d103d090bb1541237
  size 5368
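
training_args.bin is a torch-pickled transformers.TrainingArguments object, so it can be inspected directly; this sketch assumes a local copy and an environment with both torch and transformers installed (weights_only=False is needed on recent PyTorch releases to unpickle a non-tensor object):

import torch

# Unpickle the saved TrainingArguments; the path assumes a local download.
args = torch.load("training_args.bin", weights_only=False)
print(args.per_device_train_batch_size, args.learning_rate, args.max_steps)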