kmok1 committed on
Commit
95fa571
1 Parent(s): 67263fa

End of training

Files changed (5)
  1. README.md +59 -59
  2. config.json +1 -1
  3. generation_config.json +1 -1
  4. model.safetensors +1 -1
  5. training_args.bin +2 -2
README.md CHANGED
@@ -17,9 +17,9 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [facebook/m2m100_1.2B](https://huggingface.co/facebook/m2m100_1.2B) on an unknown dataset.
 It achieves the following results on the evaluation set:
- - Loss: 6.9907
- - Bleu: 0.0
- - Gen Len: 5.0
+ - Loss: 2.3546
+ - Bleu: 46.5499
+ - Gen Len: 19.8571
 
 ## Model description
 
@@ -48,63 +48,63 @@ The following hyperparameters were used during training:
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
- |:-------------:|:-----:|:----:|:---------------:|:----:|:-------:|
- | 4.7756 | 1.0 | 6 | 6.9340 | 0.0 | 5.0 |
- | 3.9022 | 2.0 | 12 | 6.9354 | 0.0 | 5.0 |
- | 5.0741 | 3.0 | 18 | 6.9379 | 0.0 | 5.0 |
- | 4.4159 | 4.0 | 24 | 6.9402 | 0.0 | 5.0 |
- | 3.6725 | 5.0 | 30 | 6.9430 | 0.0 | 5.0 |
- | 4.3425 | 6.0 | 36 | 6.9461 | 0.0 | 5.0 |
- | 4.5332 | 7.0 | 42 | 6.9481 | 0.0 | 5.0 |
- | 4.5802 | 8.0 | 48 | 6.9509 | 0.0 | 5.0 |
- | 4.3489 | 9.0 | 54 | 6.9525 | 0.0 | 5.0 |
- | 4.5023 | 10.0 | 60 | 6.9545 | 0.0 | 5.0 |
- | 4.5163 | 11.0 | 66 | 6.9582 | 0.0 | 5.0 |
- | 3.811 | 12.0 | 72 | 6.9592 | 0.0 | 5.0 |
- | 4.4632 | 13.0 | 78 | 6.9608 | 0.0 | 5.0 |
- | 4.5768 | 14.0 | 84 | 6.9621 | 0.0 | 5.0 |
- | 4.9377 | 15.0 | 90 | 6.9637 | 0.0 | 5.0 |
- | 4.5133 | 16.0 | 96 | 6.9650 | 0.0 | 5.0 |
- | 4.6817 | 17.0 | 102 | 6.9650 | 0.0 | 5.0 |
- | 4.7848 | 18.0 | 108 | 6.9658 | 0.0 | 5.0 |
- | 4.1093 | 19.0 | 114 | 6.9671 | 0.0 | 5.0 |
- | 4.5681 | 20.0 | 120 | 6.9690 | 0.0 | 5.0 |
- | 4.7043 | 21.0 | 126 | 6.9708 | 0.0 | 5.0 |
- | 4.0004 | 22.0 | 132 | 6.9720 | 0.0 | 5.0 |
- | 4.9032 | 23.0 | 138 | 6.9723 | 0.0 | 5.0 |
- | 4.5895 | 24.0 | 144 | 6.9722 | 0.0 | 5.0 |
- | 4.3594 | 25.0 | 150 | 6.9732 | 0.0 | 5.0 |
- | 4.8269 | 26.0 | 156 | 6.9755 | 0.0 | 5.0 |
- | 4.4823 | 27.0 | 162 | 6.9776 | 0.0 | 5.0 |
- | 4.6239 | 28.0 | 168 | 6.9794 | 0.0 | 5.0 |
- | 4.1998 | 29.0 | 174 | 6.9804 | 0.0 | 5.0 |
- | 3.9999 | 30.0 | 180 | 6.9819 | 0.0 | 5.0 |
- | 4.5411 | 31.0 | 186 | 6.9831 | 0.0 | 5.0 |
- | 4.3902 | 32.0 | 192 | 6.9841 | 0.0 | 5.0 |
- | 5.4393 | 33.0 | 198 | 6.9849 | 0.0 | 5.0 |
- | 4.5883 | 34.0 | 204 | 6.9856 | 0.0 | 5.0 |
- | 4.2695 | 35.0 | 210 | 6.9862 | 0.0 | 5.0 |
- | 5.4745 | 36.0 | 216 | 6.9866 | 0.0 | 5.0 |
- | 4.8483 | 37.0 | 222 | 6.9871 | 0.0 | 5.0 |
- | 4.9538 | 38.0 | 228 | 6.9879 | 0.0 | 5.0 |
- | 4.6334 | 39.0 | 234 | 6.9887 | 0.0 | 5.0 |
- | 4.1038 | 40.0 | 240 | 6.9894 | 0.0 | 5.0 |
- | 5.501 | 41.0 | 246 | 6.9900 | 0.0 | 5.0 |
- | 4.445 | 42.0 | 252 | 6.9905 | 0.0 | 5.0 |
- | 3.8749 | 43.0 | 258 | 6.9906 | 0.0 | 5.0 |
- | 3.9839 | 44.0 | 264 | 6.9907 | 0.0 | 5.0 |
- | 4.4551 | 45.0 | 270 | 6.9907 | 0.0 | 5.0 |
- | 4.5297 | 46.0 | 276 | 6.9907 | 0.0 | 5.0 |
- | 4.7603 | 47.0 | 282 | 6.9906 | 0.0 | 5.0 |
- | 4.7698 | 48.0 | 288 | 6.9907 | 0.0 | 5.0 |
- | 4.3381 | 49.0 | 294 | 6.9907 | 0.0 | 5.0 |
- | 4.6124 | 50.0 | 300 | 6.9907 | 0.0 | 5.0 |
+ | Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
+ |:-------------:|:-----:|:----:|:---------------:|:-------:|:-------:|
+ | 0.0 | 1.0 | 6 | 2.4739 | 49.3833 | 19.8571 |
+ | 0.0 | 2.0 | 12 | 2.4967 | 47.4622 | 20.7143 |
+ | 0.0087 | 3.0 | 18 | 2.6016 | 47.8384 | 20.9524 |
+ | 0.0 | 4.0 | 24 | 2.6004 | 49.8858 | 19.9048 |
+ | 0.0825 | 5.0 | 30 | 2.4731 | 50.7434 | 19.9524 |
+ | 0.0 | 6.0 | 36 | 2.4229 | 45.2602 | 20.7619 |
+ | 0.0002 | 7.0 | 42 | 2.4148 | 45.5274 | 20.5238 |
+ | 0.0001 | 8.0 | 48 | 2.3583 | 47.4096 | 19.9524 |
+ | 0.0 | 9.0 | 54 | 2.3559 | 49.1212 | 20.1905 |
+ | 0.0 | 10.0 | 60 | 2.3610 | 47.0296 | 20.0952 |
+ | 0.0001 | 11.0 | 66 | 2.3423 | 47.2022 | 19.8571 |
+ | 0.0002 | 12.0 | 72 | 2.2938 | 48.5473 | 20.0952 |
+ | 0.0 | 13.0 | 78 | 2.2591 | 49.6382 | 19.4762 |
+ | 0.0001 | 14.0 | 84 | 2.2492 | 49.5102 | 19.6667 |
+ | 0.0001 | 15.0 | 90 | 2.2740 | 49.1707 | 19.6667 |
+ | 0.0 | 16.0 | 96 | 2.2876 | 48.9631 | 19.3333 |
+ | 0.0023 | 17.0 | 102 | 2.2842 | 48.7639 | 19.6667 |
+ | 0.0001 | 18.0 | 108 | 2.2830 | 45.9993 | 19.5238 |
+ | 0.0 | 19.0 | 114 | 2.2872 | 49.1391 | 19.7619 |
+ | 0.0 | 20.0 | 120 | 2.2893 | 49.1623 | 19.8095 |
+ | 0.0 | 21.0 | 126 | 2.2948 | 48.5803 | 20.0 |
+ | 0.0 | 22.0 | 132 | 2.3048 | 48.9732 | 20.0476 |
+ | 0.0 | 23.0 | 138 | 2.3114 | 49.1156 | 19.9524 |
+ | 0.0 | 24.0 | 144 | 2.3169 | 49.1156 | 19.9524 |
+ | 0.0 | 25.0 | 150 | 2.3202 | 48.4435 | 20.0 |
+ | 0.0 | 26.0 | 156 | 2.3227 | 48.4435 | 20.0 |
+ | 0.0 | 27.0 | 162 | 2.3236 | 48.4435 | 20.0 |
+ | 0.0 | 28.0 | 168 | 2.3244 | 48.4435 | 20.0 |
+ | 0.0 | 29.0 | 174 | 2.3268 | 48.4435 | 20.0 |
+ | 0.0002 | 30.0 | 180 | 2.3296 | 45.9582 | 19.8571 |
+ | 0.0 | 31.0 | 186 | 2.3319 | 45.9582 | 19.8571 |
+ | 0.0 | 32.0 | 192 | 2.3338 | 45.9582 | 19.8571 |
+ | 0.0 | 33.0 | 198 | 2.3401 | 46.8428 | 19.8571 |
+ | 0.0 | 34.0 | 204 | 2.3473 | 46.586 | 19.8095 |
+ | 0.0001 | 35.0 | 210 | 2.3513 | 46.586 | 19.8095 |
+ | 0.0 | 36.0 | 216 | 2.3539 | 48.1767 | 20.0476 |
+ | 0.0 | 37.0 | 222 | 2.3554 | 48.1966 | 19.9048 |
+ | 0.0 | 38.0 | 228 | 2.3563 | 48.1966 | 19.9048 |
+ | 0.0 | 39.0 | 234 | 2.3563 | 48.1966 | 19.9048 |
+ | 0.0 | 40.0 | 240 | 2.3550 | 46.5682 | 19.8095 |
+ | 0.0001 | 41.0 | 246 | 2.3541 | 46.5499 | 19.9524 |
+ | 0.0 | 42.0 | 252 | 2.3534 | 46.5499 | 19.8571 |
+ | 0.0001 | 43.0 | 258 | 2.3533 | 46.5499 | 19.8571 |
+ | 0.0 | 44.0 | 264 | 2.3533 | 46.5499 | 19.8571 |
+ | 0.0 | 45.0 | 270 | 2.3537 | 46.5499 | 19.8571 |
+ | 0.0001 | 46.0 | 276 | 2.3540 | 46.5499 | 19.8571 |
+ | 0.0 | 47.0 | 282 | 2.3543 | 46.5499 | 19.8571 |
+ | 0.0 | 48.0 | 288 | 2.3544 | 46.5499 | 19.8571 |
+ | 0.0 | 49.0 | 294 | 2.3545 | 46.5499 | 19.8571 |
+ | 0.0 | 50.0 | 300 | 2.3546 | 46.5499 | 19.8571 |
 
 
 ### Framework versions
 
- - Transformers 4.35.2
- - Pytorch 1.13.1+cu117
- - Datasets 2.16.1
- - Tokenizers 0.15.0
+ - Transformers 4.38.2
+ - Pytorch 2.1.0+cu121
+ - Datasets 2.18.0
+ - Tokenizers 0.15.2
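
The updated card reports Loss, Bleu, and Gen Len on the evaluation set. Below is a minimal sketch of how such numbers are typically computed for an M2M100 fine-tune; the checkpoint path, language codes, and the tiny source/reference lists are placeholders, since this commit does not name the dataset or language pair, and "Gen Len" is taken to be the average number of non-pad generated tokens (the usual convention in the Hugging Face translation examples).

```python
# Sketch: score an M2M100 fine-tune with BLEU and average generation length.
# The repo path and language codes are placeholders; swap in the real ones.
import sacrebleu
import torch
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

repo = "path/to/this-fine-tuned-checkpoint"  # placeholder, not from this commit
tokenizer = M2M100Tokenizer.from_pretrained(repo)
model = M2M100ForConditionalGeneration.from_pretrained(repo)

sources = ["Example source sentence."]           # placeholder eval inputs
references = ["Example reference translation."]  # placeholder references

tokenizer.src_lang = "en"  # placeholder source language code
inputs = tokenizer(sources, return_tensors="pt", padding=True)
with torch.no_grad():
    generated = model.generate(
        **inputs,
        forced_bos_token_id=tokenizer.get_lang_id("fr"),  # placeholder target language
        num_beams=5,     # matches generation_config.json below
        max_length=200,  # matches generation_config.json below
    )
predictions = tokenizer.batch_decode(generated, skip_special_tokens=True)

# Corpus-level BLEU as computed by sacrebleu.
bleu = sacrebleu.corpus_bleu(predictions, [references]).score
# Assumed definition of "Gen Len": mean count of non-pad tokens per output.
gen_len = (generated != tokenizer.pad_token_id).sum(dim=1).float().mean().item()
print(f"BLEU: {bleu:.4f}  Gen Len: {gen_len:.4f}")
```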
config.json CHANGED
@@ -31,7 +31,7 @@
   "pad_token_id": 1,
   "scale_embedding": true,
   "torch_dtype": "float32",
- "transformers_version": "4.35.2",
+ "transformers_version": "4.38.2",
   "use_cache": true,
   "vocab_size": 128112
 }
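
Only the `transformers_version` stamp changes here; the model fields are untouched. If needed, the saved values can be read back with the standard config API; a minimal sketch, again with a placeholder checkpoint path:

```python
# Sketch: inspect the config fields shown in the diff above.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("path/to/this-fine-tuned-checkpoint")  # placeholder
print(config.vocab_size)            # 128112
print(config.pad_token_id)          # 1
print(config.torch_dtype)           # float32 (dtype recorded for the saved weights)
print(config.transformers_version)  # version recorded at save time, here "4.38.2"
```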
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "max_length": 200,
   "num_beams": 5,
   "pad_token_id": 1,
- "transformers_version": "4.35.2"
+ "transformers_version": "4.38.2"
 }
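
As with config.json, only the version stamp changes; the decoding defaults stay at beam search with `num_beams=5` and `max_length=200`. These are the values `generate()` falls back to when no overrides are passed; a minimal sketch, with the same placeholder path:

```python
# Sketch: the saved generation_config.json supplies generate() defaults.
from transformers import GenerationConfig, M2M100ForConditionalGeneration

repo = "path/to/this-fine-tuned-checkpoint"  # placeholder
gen_config = GenerationConfig.from_pretrained(repo)
print(gen_config.num_beams, gen_config.max_length, gen_config.pad_token_id)  # 5 200 1

model = M2M100ForConditionalGeneration.from_pretrained(repo)
# generate() reads model.generation_config (loaded from this file) unless the
# same arguments are passed explicitly at call time.
# outputs = model.generate(**inputs)  # beam search with 5 beams, up to 200 tokens
```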
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6549cbbe5b46b340eeb8faa7a0369178ae0ecb6fb9b2a60dede8532c7c4a4ef3
+ oid sha256:7966f7f688730792e87d02c20f1f4db1bc2dd02333e59af1ebe0bb9980085227
  size 4958000808
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fbd0175e25742a2a496acd9709683897e1dc5278aaa68a1d11984c4bfc20425b
- size 4283
+ oid sha256:85846beca15541da568916465f42c9e1165530ace2c48e1c2d190b71c42a7f94
+ size 5048
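
model.safetensors and training_args.bin are tracked through Git LFS, so the diffs above only show pointer files (`oid sha256:…` plus `size`). A downloaded artifact can be checked against its pointer; a minimal Python sketch, with the local filename as a placeholder:

```python
# Sketch: verify a downloaded LFS artifact against its pointer's sha256 oid and size.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "model.safetensors"  # placeholder local path
expected_oid = "7966f7f688730792e87d02c20f1f4db1bc2dd02333e59af1ebe0bb9980085227"
expected_size = 4958000808

assert os.path.getsize(path) == expected_size, "size does not match LFS pointer"
assert sha256_of(path) == expected_oid, "sha256 does not match LFS pointer"
print("model.safetensors matches its LFS pointer")
```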