codebyzeb committed
Commit c4e461f
1 Parent(s): 40b2009

Training in progress, step 250000

babyslm/syntactic.txt CHANGED
The diff for this file is too large to render. See raw diff
 
blimp_results.json CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bfea65bfcce37100d270202f3ad83c790c61a8aaf8ae43bbab265110bd03fd1b
- size 80268261
+ oid sha256:10604f23a65adc5593915011d1243b48d61f219befad2e7f10a73d6feda78ff4
+ size 80226394
config.json CHANGED
@@ -3,9 +3,9 @@
  "architectures": [
    "GPT2LMHeadModel"
  ],
- "attn_pdrop": 0.1,
+ "attn_pdrop": 0.3,
  "bos_token_id": 0,
- "embd_pdrop": 0.1,
+ "embd_pdrop": 0.3,
  "eos_token_id": 0,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
@@ -16,7 +16,7 @@
  "n_layer": 12,
  "n_positions": 256,
  "reorder_and_upcast_attn": false,
- "resid_pdrop": 0.1,
+ "resid_pdrop": 0.3,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bb924aeafeef2682cf5636ae4727d725b15812a11402f8efa7fc46da860de76f
+ oid sha256:1ec58f542f7ff628ac674a695b02be0f0c4a42633bd5239f2ef4802047a0bcd5
  size 390177408
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5ae0dde72c52f7e1e0ff9bbc8aac95f1da19222bc12c580df46c08d305ad1f34
+ oid sha256:b6bf434d08adffb63320a056f43c3d312c362758a72db108fcc5c86bb963bff1
  size 5368
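
Since this commit stores the step-250000 checkpoint, the weights committed here can be loaded by pinning the commit hash as the revision. The sketch below assumes a placeholder repository id, as the actual repo name is not shown on this page.

# Sketch: load the exact checkpoint from this commit by pinning its revision.
from transformers import AutoModelForCausalLM

repo_id = "codebyzeb/<model-name>"  # placeholder: real repository id not shown here

# revision="c4e461f" fetches the step-250000 weights committed here,
# rather than whatever the repository's main branch currently points to.
model = AutoModelForCausalLM.from_pretrained(repo_id, revision="c4e461f")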