SashaSheykina committed on
Commit 43422fa • 1 Parent(s): 0d8bfb2
Training in progress, epoch 1
Browse files
- model.safetensors +1 -1
- runs/Aug03_09-03-57_21ed6c99437d/events.out.tfevents.1722675838.21ed6c99437d.3857.3 +2 -2
- runs/Aug03_09-03-57_21ed6c99437d/events.out.tfevents.1722676079.21ed6c99437d.3857.4 +3 -0
- runs/Aug03_09-15-49_21ed6c99437d/events.out.tfevents.1722676550.21ed6c99437d.3857.5 +3 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- training_args.bin +1 -1
- vocab.json +0 -0
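The commit updates the model weights, TensorBoard run logs, tokenizer files and training arguments listed above. As a minimal sketch (not part of the commit), the changed files can be fetched at exactly this revision with huggingface_hub; the repository id below is a placeholder, since the commit page does not show it.

# Minimal sketch: fetch files from this specific commit by its short hash.
# REPO_ID is an assumption -- the commit page does not name the repository.
from huggingface_hub import hf_hub_download

REPO_ID = "SashaSheykina/some-model"   # placeholder, not taken from this page
REVISION = "43422fa"                   # short commit hash shown above

weights_path = hf_hub_download(REPO_ID, "model.safetensors", revision=REVISION)
args_path = hf_hub_download(REPO_ID, "training_args.bin", revision=REVISION)
print(weights_path, args_path)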
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1e34f6b93f8898014a7591d0b6973aed64b9094cc17ffd307d26847c3e5e38a3
 size 891558696
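model.safetensors is stored through Git LFS, so the diff above only swaps the pointer's sha256 for the new epoch-1 weights while the size stays at 891558696 bytes. A minimal sketch of checking a downloaded copy against the oid recorded in the pointer (the path here refers to the resolved weights file, not the 3-line pointer itself):

# Minimal sketch: compare a downloaded blob with the sha256 recorded in its LFS pointer.
import hashlib

EXPECTED_OID = "1e34f6b93f8898014a7591d0b6973aed64b9094cc17ffd307d26847c3e5e38a3"  # from the diff above

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("model.safetensors") == EXPECTED_OID  # assumes the resolved file is local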
runs/Aug03_09-03-57_21ed6c99437d/events.out.tfevents.1722675838.21ed6c99437d.3857.3 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c1bf3e518387f7ca7474c48b3c2afe488c9e512a51584c932fe860284062bad8
+size 6383
runs/Aug03_09-03-57_21ed6c99437d/events.out.tfevents.1722676079.21ed6c99437d.3857.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6a9ab00fc71e3a92f4d303b5c018621b29e4ca90027efce27c3fcd16b9bf65b
+size 648
runs/Aug03_09-15-49_21ed6c99437d/events.out.tfevents.1722676550.21ed6c99437d.3857.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06a91bcaf3180460dba62a3efabb86a08e0029a50d25632484fcf0dd4ac151e7
+size 4184
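The three runs/... files above are TensorBoard event logs written during training; this commit updates one and adds two. A minimal sketch of reading the scalar metrics they contain, assuming the runs/ directory has been downloaded locally (running tensorboard --logdir runs serves the same data in a browser):

# Minimal sketch: dump scalar metrics from the downloaded event files.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug03_09-15-49_21ed6c99437d")  # one of the run directories above
acc.Reload()

for tag in acc.Tags().get("scalars", []):
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)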
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -954,5 +954,6 @@
   "pad_token": "</s>",
   "sep_token": "</s>",
   "tokenizer_class": "RobertaTokenizer",
+  "trim_offsets": true,
   "unk_token": "<unk>"
 }
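The only change to tokenizer_config.json is the added "trim_offsets": true, which the fast byte-level Roberta tokenizer uses to trim the leading space of word-initial tokens out of the reported character offsets. A minimal sketch of observing the effect, assuming a local clone of the repository in the current directory:

# Minimal sketch: load the tokenizer from a local clone ('.') and inspect offset mappings.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # picks up trim_offsets from tokenizer_config.json
enc = tok("hello world", return_offsets_mapping=True)

# With trim_offsets=True the (start, end) spans exclude the space in front of " world".
for token, span in zip(enc.tokens(), enc["offset_mapping"]):
    print(token, span)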
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ec947d016f84c6378f7fd7ff56fa4580ee2c243050a75498e9140c7ba58c598e
 size 5304
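training_args.bin is the TrainingArguments object that the transformers Trainer pickles alongside its outputs; only its hash changes here, while the size stays 5304 bytes. A minimal sketch of loading it back to inspect the run's hyperparameters (weights_only=False is assumed to be needed on recent torch versions because the file is a pickled Python object, and transformers must be importable for unpickling):

# Minimal sketch: inspect the serialized TrainingArguments.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)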
vocab.json CHANGED
The diff for this file is too large to render. See raw diff