gweltou committed on
Commit e4b333d
1 Parent(s): e4ed046

Training in progress, step 1000

config.json CHANGED
@@ -43,7 +43,7 @@
   "feat_extract_activation": "gelu",
   "feat_extract_dropout": 0.0,
   "feat_extract_norm": "group",
-  "feat_proj_dropout": 0.0,
+  "feat_proj_dropout": 0.1,
   "feat_proj_layer_norm": true,
   "final_dropout": 0.1,
   "gradient_checkpointing": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:127529f98e6b24392e456293de8d57797257a3d4eda8a228cee82fdb73d3b991
+oid sha256:d3700ebedd7ed60d4610cf46173f53f60da7bae4076706749827952373376e6a
 size 377666024
runs/Jun16_15-07-50_gweltaz-NUC10i7FNK/events.out.tfevents.1718543599.gweltaz-NUC10i7FNK.2885.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2851e4b5b834fa28ac3ac640eba291ef91e788b8ab048bcd1e0ba3a63b6c1237
-size 13612
+oid sha256:e9135abba5707619e4324a3cdb6f6b3cdd98321c2ef37a4a22d99a6f9e7926fc
+size 14188
runs/Jun16_17-48-34_gweltaz-NUC10i7FNK/events.out.tfevents.1718553039.gweltaz-NUC10i7FNK.4395.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:241d5ac9313ccec2ab7f2530b3e622e9e440ca858519472a8430b13eaad926eb
+size 6125
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5859be9c52daf23f3ac39ddb3f85e64d25650448d9a022c5be9f0f5b7b2fb2c5
+oid sha256:1e9b6451d2fe498fb9129651c59dac9ce4bf5c543386d5e47e6bf249502954a8
 size 4475