salbatarni committed
Commit 5dbe9ae
1 Parent(s): be32b2e

Training in progress, step 340

Files changed (19)
  1. README.md +175 -175
  2. runs/Aug25_05-41-07_358cdae836c1/events.out.tfevents.1724564469.358cdae836c1.24.0 +3 -0
  3. runs/Aug25_06-04-04_358cdae836c1/events.out.tfevents.1724565845.358cdae836c1.24.1 +3 -0
  4. runs/Aug25_06-27-02_358cdae836c1/events.out.tfevents.1724567223.358cdae836c1.24.2 +3 -0
  5. runs/Aug25_06-50-00_358cdae836c1/events.out.tfevents.1724568601.358cdae836c1.24.3 +3 -0
  6. runs/Aug25_07-12-58_358cdae836c1/events.out.tfevents.1724569978.358cdae836c1.24.4 +3 -0
  7. runs/Aug25_07-35-55_358cdae836c1/events.out.tfevents.1724571356.358cdae836c1.24.5 +3 -0
  8. runs/Aug25_07-59-55_358cdae836c1/events.out.tfevents.1724572795.358cdae836c1.24.6 +3 -0
  9. runs/Aug25_08-24-05_358cdae836c1/events.out.tfevents.1724574246.358cdae836c1.24.7 +3 -0
  10. runs/Aug25_08-48-17_358cdae836c1/events.out.tfevents.1724575698.358cdae836c1.24.8 +3 -0
  11. runs/Aug25_09-12-24_358cdae836c1/events.out.tfevents.1724577145.358cdae836c1.24.9 +3 -0
  12. runs/Aug25_09-36-31_358cdae836c1/events.out.tfevents.1724578591.358cdae836c1.24.10 +3 -0
  13. runs/Aug25_10-01-23_358cdae836c1/events.out.tfevents.1724580084.358cdae836c1.24.11 +3 -0
  14. runs/Aug25_10-26-23_358cdae836c1/events.out.tfevents.1724581584.358cdae836c1.24.12 +3 -0
  15. runs/Aug25_10-51-26_358cdae836c1/events.out.tfevents.1724583087.358cdae836c1.24.13 +3 -0
  16. runs/Aug25_11-16-26_358cdae836c1/events.out.tfevents.1724584587.358cdae836c1.24.14 +3 -0
  17. runs/Aug25_11-41-27_358cdae836c1/events.out.tfevents.1724586087.358cdae836c1.24.15 +3 -0
  18. runs/Aug25_12-06-23_358cdae836c1/events.out.tfevents.1724587584.358cdae836c1.24.16 +3 -0
  19. training_args.bin +1 -1
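
The commit message ("Training in progress, step 340") and the batch of TensorBoard event files below are what `transformers.Trainer` uploads automatically when Hub syncing is enabled during training. The following is a minimal sketch of a configuration that would produce commits like this one; the repository id, placeholder data, and schedule are illustrative assumptions, not values recovered from this commit.

```python
# Hypothetical configuration sketch: repo id, data, and schedule are assumptions.
from datasets import Dataset
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-cased")
model = AutoModelForSequenceClassification.from_pretrained(
    "google-bert/bert-base-cased", num_labels=1)  # single-output regression head (assumption)

# Placeholder data so the sketch is self-contained; the real prompt-adherence data is not in this commit.
data = Dataset.from_dict({"text": ["example essay"] * 8, "label": [2.0] * 8}).map(
    lambda batch: tokenizer(batch["text"], truncation=True, padding="max_length", max_length=128),
    batched=True)

args = TrainingArguments(
    output_dir="bert_baseline_prompt_adherence_task6_fold0",
    num_train_epochs=5,
    eval_strategy="steps", eval_steps=2,   # matches the every-two-steps eval log in the README diff
    logging_steps=500,                     # with only 340 total steps, training loss shows as "No log"
    report_to="tensorboard",               # writes the runs/*.tfevents files added in this commit
    push_to_hub=True,                      # periodic saves create "Training in progress, step N" commits
)
trainer = Trainer(model=model, args=args, train_dataset=data, eval_dataset=data)
# trainer.train()  # uncomment to train and push (requires `huggingface-cli login`)
```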
README.md CHANGED
@@ -4,20 +4,20 @@ base_model: google-bert/bert-base-cased
  tags:
  - generated_from_trainer
  model-index:
- - name: bert_baseline_prompt_adherence_task6_fold1
  results: []
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
  should probably proofread and complete it, then remove this comment. -->

- # bert_baseline_prompt_adherence_task6_fold1

  This model is a fine-tuned version of [google-bert/bert-base-cased](https://huggingface.co/google-bert/bert-base-cased) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.3161
- - Qwk: 0.7906
- - Mse: 0.3161

  ## Model description
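
The card above describes a BERT checkpoint fine-tuned to emit a single continuous score (Loss equals Mse, which suggests an MSE regression objective). A checkpoint like this could be loaded for inference roughly as follows; the repo id is a guess assembled from the committer name and the model name, so treat it as a hypothetical placeholder.

```python
# Hedged sketch: assumes a single regression logit; the repo id below is hypothetical.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "salbatarni/bert_baseline_prompt_adherence_task6_fold1"  # placeholder, may differ
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

inputs = tokenizer("An example essay response to score.", return_tensors="pt",
                   truncation=True, max_length=512)
with torch.no_grad():
    score = model(**inputs).logits.squeeze().item()  # continuous prompt-adherence score
print(round(score, 2))
```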
 
@@ -48,176 +48,176 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss | Qwk | Mse |
  |:-------------:|:------:|:----:|:---------------:|:-------:|:------:|
- | No log | 0.0294 | 2 | 1.6151 | 0.0 | 1.6151 |
- | No log | 0.0588 | 4 | 1.4106 | -0.0795 | 1.4106 |
- | No log | 0.0882 | 6 | 1.2423 | 0.0040 | 1.2423 |
- | No log | 0.1176 | 8 | 1.0271 | 0.0 | 1.0271 |
- | No log | 0.1471 | 10 | 0.9151 | 0.0 | 0.9151 |
- | No log | 0.1765 | 12 | 0.8539 | 0.0 | 0.8539 |
- | No log | 0.2059 | 14 | 0.8101 | 0.0230 | 0.8101 |
- | No log | 0.2353 | 16 | 0.7750 | 0.1954 | 0.7750 |
- | No log | 0.2647 | 18 | 0.7123 | 0.3940 | 0.7123 |
- | No log | 0.2941 | 20 | 0.7003 | 0.4145 | 0.7003 |
- | No log | 0.3235 | 22 | 0.6926 | 0.4761 | 0.6926 |
- | No log | 0.3529 | 24 | 0.5855 | 0.5196 | 0.5855 |
- | No log | 0.3824 | 26 | 0.6844 | 0.5734 | 0.6844 |
- | No log | 0.4118 | 28 | 0.5221 | 0.6010 | 0.5221 |
- | No log | 0.4412 | 30 | 0.6061 | 0.4082 | 0.6061 |
- | No log | 0.4706 | 32 | 0.7027 | 0.3430 | 0.7027 |
- | No log | 0.5 | 34 | 0.4452 | 0.5202 | 0.4452 |
- | No log | 0.5294 | 36 | 0.7128 | 0.6240 | 0.7128 |
- | No log | 0.5588 | 38 | 0.8934 | 0.5902 | 0.8934 |
- | No log | 0.5882 | 40 | 0.7046 | 0.6221 | 0.7046 |
- | No log | 0.6176 | 42 | 0.4068 | 0.6459 | 0.4068 |
- | No log | 0.6471 | 44 | 0.3781 | 0.5855 | 0.3781 |
- | No log | 0.6765 | 46 | 0.3648 | 0.6206 | 0.3648 |
- | No log | 0.7059 | 48 | 0.4084 | 0.6868 | 0.4084 |
- | No log | 0.7353 | 50 | 0.4624 | 0.6694 | 0.4624 |
- | No log | 0.7647 | 52 | 0.3985 | 0.6852 | 0.3985 |
- | No log | 0.7941 | 54 | 0.3451 | 0.6932 | 0.3451 |
- | No log | 0.8235 | 56 | 0.3168 | 0.6909 | 0.3168 |
- | No log | 0.8529 | 58 | 0.3904 | 0.7478 | 0.3904 |
- | No log | 0.8824 | 60 | 0.4429 | 0.7478 | 0.4429 |
- | No log | 0.9118 | 62 | 0.5282 | 0.7285 | 0.5282 |
- | No log | 0.9412 | 64 | 0.4570 | 0.7181 | 0.4570 |
- | No log | 0.9706 | 66 | 0.3236 | 0.6996 | 0.3236 |
- | No log | 1.0 | 68 | 0.3754 | 0.5678 | 0.3754 |
- | No log | 1.0294 | 70 | 0.3449 | 0.6155 | 0.3449 |
- | No log | 1.0588 | 72 | 0.3238 | 0.7088 | 0.3238 |
- | No log | 1.0882 | 74 | 0.3711 | 0.7180 | 0.3711 |
- | No log | 1.1176 | 76 | 0.3393 | 0.7215 | 0.3393 |
- | No log | 1.1471 | 78 | 0.3086 | 0.7084 | 0.3086 |
- | No log | 1.1765 | 80 | 0.2901 | 0.6634 | 0.2901 |
- | No log | 1.2059 | 82 | 0.2863 | 0.6687 | 0.2863 |
- | No log | 1.2353 | 84 | 0.3145 | 0.7296 | 0.3145 |
- | No log | 1.2647 | 86 | 0.3167 | 0.7430 | 0.3167 |
- | No log | 1.2941 | 88 | 0.2960 | 0.7263 | 0.2960 |
- | No log | 1.3235 | 90 | 0.3011 | 0.6501 | 0.3011 |
- | No log | 1.3529 | 92 | 0.2984 | 0.6395 | 0.2984 |
- | No log | 1.3824 | 94 | 0.2845 | 0.7058 | 0.2845 |
- | No log | 1.4118 | 96 | 0.2914 | 0.7164 | 0.2914 |
- | No log | 1.4412 | 98 | 0.3352 | 0.7683 | 0.3352 |
- | No log | 1.4706 | 100 | 0.2973 | 0.7189 | 0.2973 |
- | No log | 1.5 | 102 | 0.2804 | 0.6710 | 0.2804 |
- | No log | 1.5294 | 104 | 0.2802 | 0.6757 | 0.2802 |
- | No log | 1.5588 | 106 | 0.2827 | 0.6976 | 0.2827 |
- | No log | 1.5882 | 108 | 0.2852 | 0.7202 | 0.2852 |
- | No log | 1.6176 | 110 | 0.2725 | 0.6922 | 0.2725 |
- | No log | 1.6471 | 112 | 0.3001 | 0.7581 | 0.3001 |
- | No log | 1.6765 | 114 | 0.4394 | 0.7953 | 0.4394 |
- | No log | 1.7059 | 116 | 0.4924 | 0.7985 | 0.4924 |
- | No log | 1.7353 | 118 | 0.3474 | 0.7937 | 0.3474 |
- | No log | 1.7647 | 120 | 0.2708 | 0.6950 | 0.2708 |
- | No log | 1.7941 | 122 | 0.3139 | 0.5932 | 0.3139 |
- | No log | 1.8235 | 124 | 0.2859 | 0.6374 | 0.2859 |
- | No log | 1.8529 | 126 | 0.2993 | 0.7651 | 0.2993 |
- | No log | 1.8824 | 128 | 0.3569 | 0.7880 | 0.3569 |
- | No log | 1.9118 | 130 | 0.3548 | 0.7866 | 0.3548 |
- | No log | 1.9412 | 132 | 0.3157 | 0.7744 | 0.3157 |
- | No log | 1.9706 | 134 | 0.2844 | 0.7234 | 0.2844 |
- | No log | 2.0 | 136 | 0.2975 | 0.7440 | 0.2975 |
- | No log | 2.0294 | 138 | 0.3519 | 0.7765 | 0.3519 |
- | No log | 2.0588 | 140 | 0.3338 | 0.7641 | 0.3338 |
- | No log | 2.0882 | 142 | 0.2962 | 0.7337 | 0.2962 |
- | No log | 2.1176 | 144 | 0.3145 | 0.7499 | 0.3145 |
- | No log | 2.1471 | 146 | 0.2990 | 0.7417 | 0.2990 |
- | No log | 2.1765 | 148 | 0.3092 | 0.7519 | 0.3092 |
- | No log | 2.2059 | 150 | 0.3808 | 0.7899 | 0.3808 |
- | No log | 2.2353 | 152 | 0.4696 | 0.8002 | 0.4696 |
- | No log | 2.2647 | 154 | 0.4097 | 0.7952 | 0.4097 |
- | No log | 2.2941 | 156 | 0.3007 | 0.7522 | 0.3007 |
- | No log | 2.3235 | 158 | 0.2889 | 0.6726 | 0.2889 |
- | No log | 2.3529 | 160 | 0.2786 | 0.6735 | 0.2786 |
- | No log | 2.3824 | 162 | 0.2892 | 0.7493 | 0.2892 |
- | No log | 2.4118 | 164 | 0.3698 | 0.7767 | 0.3698 |
- | No log | 2.4412 | 166 | 0.3549 | 0.7803 | 0.3549 |
- | No log | 2.4706 | 168 | 0.2865 | 0.7455 | 0.2865 |
- | No log | 2.5 | 170 | 0.2781 | 0.6629 | 0.2781 |
- | No log | 2.5294 | 172 | 0.2737 | 0.6666 | 0.2737 |
- | No log | 2.5588 | 174 | 0.2826 | 0.7476 | 0.2826 |
- | No log | 2.5882 | 176 | 0.3007 | 0.7664 | 0.3007 |
- | No log | 2.6176 | 178 | 0.2888 | 0.7527 | 0.2888 |
- | No log | 2.6471 | 180 | 0.2990 | 0.7694 | 0.2990 |
- | No log | 2.6765 | 182 | 0.2836 | 0.7441 | 0.2836 |
- | No log | 2.7059 | 184 | 0.2857 | 0.7497 | 0.2857 |
- | No log | 2.7353 | 186 | 0.3133 | 0.7761 | 0.3133 |
- | No log | 2.7647 | 188 | 0.3104 | 0.7793 | 0.3104 |
- | No log | 2.7941 | 190 | 0.3281 | 0.7806 | 0.3281 |
- | No log | 2.8235 | 192 | 0.3503 | 0.7857 | 0.3503 |
- | No log | 2.8529 | 194 | 0.3320 | 0.7875 | 0.3320 |
- | No log | 2.8824 | 196 | 0.3020 | 0.7790 | 0.3020 |
- | No log | 2.9118 | 198 | 0.2909 | 0.7768 | 0.2909 |
- | No log | 2.9412 | 200 | 0.2682 | 0.7391 | 0.2682 |
- | No log | 2.9706 | 202 | 0.2727 | 0.7557 | 0.2727 |
- | No log | 3.0 | 204 | 0.2944 | 0.7772 | 0.2944 |
- | No log | 3.0294 | 206 | 0.2866 | 0.7683 | 0.2866 |
- | No log | 3.0588 | 208 | 0.2632 | 0.7245 | 0.2632 |
- | No log | 3.0882 | 210 | 0.2619 | 0.7136 | 0.2619 |
- | No log | 3.1176 | 212 | 0.2705 | 0.7498 | 0.2705 |
- | No log | 3.1471 | 214 | 0.3193 | 0.7922 | 0.3193 |
- | No log | 3.1765 | 216 | 0.3628 | 0.7939 | 0.3628 |
- | No log | 3.2059 | 218 | 0.3372 | 0.7949 | 0.3372 |
- | No log | 3.2353 | 220 | 0.2825 | 0.7580 | 0.2825 |
- | No log | 3.2647 | 222 | 0.2762 | 0.7258 | 0.2762 |
- | No log | 3.2941 | 224 | 0.3021 | 0.7518 | 0.3021 |
- | No log | 3.3235 | 226 | 0.3377 | 0.7766 | 0.3377 |
- | No log | 3.3529 | 228 | 0.3544 | 0.7879 | 0.3544 |
- | No log | 3.3824 | 230 | 0.3219 | 0.7753 | 0.3219 |
- | No log | 3.4118 | 232 | 0.2743 | 0.7411 | 0.2743 |
- | No log | 3.4412 | 234 | 0.2715 | 0.6748 | 0.2715 |
- | No log | 3.4706 | 236 | 0.2699 | 0.6761 | 0.2699 |
- | No log | 3.5 | 238 | 0.2729 | 0.7429 | 0.2729 |
- | No log | 3.5294 | 240 | 0.3071 | 0.7842 | 0.3071 |
- | No log | 3.5588 | 242 | 0.3847 | 0.7929 | 0.3847 |
- | No log | 3.5882 | 244 | 0.4086 | 0.8056 | 0.4086 |
- | No log | 3.6176 | 246 | 0.3671 | 0.7958 | 0.3671 |
- | No log | 3.6471 | 248 | 0.3053 | 0.7729 | 0.3053 |
- | No log | 3.6765 | 250 | 0.2944 | 0.7647 | 0.2944 |
- | No log | 3.7059 | 252 | 0.2813 | 0.7453 | 0.2813 |
- | No log | 3.7353 | 254 | 0.2833 | 0.7548 | 0.2833 |
- | No log | 3.7647 | 256 | 0.2901 | 0.7628 | 0.2901 |
- | No log | 3.7941 | 258 | 0.3030 | 0.7803 | 0.3030 |
- | No log | 3.8235 | 260 | 0.2933 | 0.7719 | 0.2933 |
- | No log | 3.8529 | 262 | 0.2779 | 0.7592 | 0.2779 |
- | No log | 3.8824 | 264 | 0.2736 | 0.7546 | 0.2736 |
- | No log | 3.9118 | 266 | 0.2770 | 0.7631 | 0.2770 |
- | No log | 3.9412 | 268 | 0.2862 | 0.7772 | 0.2862 |
- | No log | 3.9706 | 270 | 0.3061 | 0.7908 | 0.3061 |
- | No log | 4.0 | 272 | 0.3413 | 0.7934 | 0.3413 |
- | No log | 4.0294 | 274 | 0.3580 | 0.7922 | 0.3580 |
- | No log | 4.0588 | 276 | 0.3543 | 0.8027 | 0.3543 |
- | No log | 4.0882 | 278 | 0.3370 | 0.7881 | 0.3370 |
- | No log | 4.1176 | 280 | 0.3156 | 0.7881 | 0.3156 |
- | No log | 4.1471 | 282 | 0.2915 | 0.7791 | 0.2915 |
- | No log | 4.1765 | 284 | 0.2874 | 0.7772 | 0.2874 |
- | No log | 4.2059 | 286 | 0.2806 | 0.7640 | 0.2806 |
- | No log | 4.2353 | 288 | 0.2824 | 0.7680 | 0.2824 |
- | No log | 4.2647 | 290 | 0.2809 | 0.7640 | 0.2809 |
- | No log | 4.2941 | 292 | 0.2782 | 0.7565 | 0.2782 |
- | No log | 4.3235 | 294 | 0.2840 | 0.7726 | 0.2840 |
- | No log | 4.3529 | 296 | 0.2928 | 0.7838 | 0.2928 |
- | No log | 4.3824 | 298 | 0.3009 | 0.7851 | 0.3009 |
- | No log | 4.4118 | 300 | 0.3178 | 0.7887 | 0.3178 |
- | No log | 4.4412 | 302 | 0.3262 | 0.7949 | 0.3262 |
- | No log | 4.4706 | 304 | 0.3206 | 0.7909 | 0.3206 |
- | No log | 4.5 | 306 | 0.3096 | 0.7869 | 0.3096 |
- | No log | 4.5294 | 308 | 0.3008 | 0.7839 | 0.3008 |
- | No log | 4.5588 | 310 | 0.2935 | 0.7759 | 0.2935 |
- | No log | 4.5882 | 312 | 0.2916 | 0.7713 | 0.2916 |
- | No log | 4.6176 | 314 | 0.2985 | 0.7819 | 0.2985 |
- | No log | 4.6471 | 316 | 0.3014 | 0.7833 | 0.3014 |
- | No log | 4.6765 | 318 | 0.2994 | 0.7820 | 0.2994 |
- | No log | 4.7059 | 320 | 0.3011 | 0.7833 | 0.3011 |
- | No log | 4.7353 | 322 | 0.3068 | 0.7860 | 0.3068 |
- | No log | 4.7647 | 324 | 0.3103 | 0.7860 | 0.3103 |
- | No log | 4.7941 | 326 | 0.3150 | 0.7931 | 0.3150 |
- | No log | 4.8235 | 328 | 0.3163 | 0.7931 | 0.3163 |
- | No log | 4.8529 | 330 | 0.3170 | 0.7918 | 0.3170 |
- | No log | 4.8824 | 332 | 0.3167 | 0.7908 | 0.3167 |
- | No log | 4.9118 | 334 | 0.3165 | 0.7908 | 0.3165 |
- | No log | 4.9412 | 336 | 0.3156 | 0.7908 | 0.3156 |
- | No log | 4.9706 | 338 | 0.3158 | 0.7899 | 0.3158 |
- | No log | 5.0 | 340 | 0.3161 | 0.7906 | 0.3161 |


  ### Framework versions
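
The Qwk and Mse columns in the table above are the usual agreement and error metrics for ordinal essay scores: quadratic weighted kappa and mean squared error. A small sketch of how they are typically computed is shown below; the rounding step and the 0 to 3 label range are assumptions, since the card does not state how continuous predictions were mapped to labels.

```python
# Hedged sketch of the two table metrics; rounding/clipping choices here are assumptions.
import numpy as np
from sklearn.metrics import cohen_kappa_score, mean_squared_error

def qwk_and_mse(predictions: np.ndarray, references: np.ndarray, low: int = 0, high: int = 3):
    preds = predictions.squeeze()
    mse = mean_squared_error(references, preds)                # the "Mse" column
    rounded = np.clip(np.rint(preds), low, high).astype(int)   # map continuous scores to labels
    qwk = cohen_kappa_score(references.astype(int), rounded,
                            weights="quadratic")               # the "Qwk" column
    return {"qwk": qwk, "mse": mse}

print(qwk_and_mse(np.array([1.2, 2.8, 0.1]), np.array([1, 3, 0])))
```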
 
@@ -4,20 +4,20 @@ base_model: google-bert/bert-base-cased
  tags:
  - generated_from_trainer
  model-index:
+ - name: bert_baseline_prompt_adherence_task6_fold0
  results: []
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
  should probably proofread and complete it, then remove this comment. -->

+ # bert_baseline_prompt_adherence_task6_fold0

  This model is a fine-tuned version of [google-bert/bert-base-cased](https://huggingface.co/google-bert/bert-base-cased) on the None dataset.
  It achieves the following results on the evaluation set:
+ - Loss: 0.3214
+ - Qwk: 0.7303
+ - Mse: 0.3214

  ## Model description

 
@@ -48,176 +48,176 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss | Qwk | Mse |
  |:-------------:|:------:|:----:|:---------------:|:-------:|:------:|
+ | No log | 0.0294 | 2 | 1.9024 | 0.0 | 1.9024 |
+ | No log | 0.0588 | 4 | 1.6433 | -0.0376 | 1.6433 |
+ | No log | 0.0882 | 6 | 1.4580 | -0.0106 | 1.4580 |
+ | No log | 0.1176 | 8 | 1.1646 | 0.0061 | 1.1646 |
+ | No log | 0.1471 | 10 | 0.9673 | 0.0061 | 0.9673 |
+ | No log | 0.1765 | 12 | 0.8529 | 0.0061 | 0.8529 |
+ | No log | 0.2059 | 14 | 0.8166 | 0.1658 | 0.8166 |
+ | No log | 0.2353 | 16 | 0.8014 | 0.0430 | 0.8014 |
+ | No log | 0.2647 | 18 | 0.7507 | 0.2430 | 0.7507 |
+ | No log | 0.2941 | 20 | 0.6900 | 0.3980 | 0.6900 |
+ | No log | 0.3235 | 22 | 0.6517 | 0.4566 | 0.6517 |
+ | No log | 0.3529 | 24 | 0.6213 | 0.4542 | 0.6213 |
+ | No log | 0.3824 | 26 | 0.5886 | 0.3707 | 0.5886 |
+ | No log | 0.4118 | 28 | 0.5738 | 0.3586 | 0.5738 |
+ | No log | 0.4412 | 30 | 0.5467 | 0.4152 | 0.5467 |
+ | No log | 0.4706 | 32 | 0.5428 | 0.3809 | 0.5428 |
+ | No log | 0.5 | 34 | 0.4846 | 0.4818 | 0.4846 |
+ | No log | 0.5294 | 36 | 0.4315 | 0.4939 | 0.4315 |
+ | No log | 0.5588 | 38 | 0.4162 | 0.5244 | 0.4162 |
+ | No log | 0.5882 | 40 | 0.3789 | 0.5985 | 0.3789 |
+ | No log | 0.6176 | 42 | 0.3613 | 0.5956 | 0.3613 |
+ | No log | 0.6471 | 44 | 0.3505 | 0.5933 | 0.3505 |
+ | No log | 0.6765 | 46 | 0.3581 | 0.6419 | 0.3581 |
+ | No log | 0.7059 | 48 | 0.3340 | 0.6165 | 0.3340 |
+ | No log | 0.7353 | 50 | 0.3323 | 0.6190 | 0.3323 |
+ | No log | 0.7647 | 52 | 0.3927 | 0.7193 | 0.3927 |
+ | No log | 0.7941 | 54 | 0.4713 | 0.7380 | 0.4713 |
+ | No log | 0.8235 | 56 | 0.3768 | 0.6750 | 0.3768 |
+ | No log | 0.8529 | 58 | 0.3531 | 0.5832 | 0.3531 |
+ | No log | 0.8824 | 60 | 0.4043 | 0.5390 | 0.4043 |
+ | No log | 0.9118 | 62 | 0.3447 | 0.5850 | 0.3447 |
+ | No log | 0.9412 | 64 | 0.3618 | 0.6575 | 0.3618 |
+ | No log | 0.9706 | 66 | 0.3572 | 0.6539 | 0.3572 |
+ | No log | 1.0 | 68 | 0.3949 | 0.6765 | 0.3949 |
+ | No log | 1.0294 | 70 | 0.3686 | 0.6660 | 0.3686 |
+ | No log | 1.0588 | 72 | 0.3326 | 0.6180 | 0.3326 |
+ | No log | 1.0882 | 74 | 0.3520 | 0.5823 | 0.3520 |
+ | No log | 1.1176 | 76 | 0.3303 | 0.6063 | 0.3303 |
+ | No log | 1.1471 | 78 | 0.3475 | 0.6601 | 0.3475 |
+ | No log | 1.1765 | 80 | 0.3729 | 0.6752 | 0.3729 |
+ | No log | 1.2059 | 82 | 0.3223 | 0.6533 | 0.3223 |
+ | No log | 1.2353 | 84 | 0.3665 | 0.5588 | 0.3665 |
+ | No log | 1.2647 | 86 | 0.3824 | 0.5485 | 0.3824 |
+ | No log | 1.2941 | 88 | 0.3093 | 0.6230 | 0.3093 |
+ | No log | 1.3235 | 90 | 0.3160 | 0.6514 | 0.3160 |
+ | No log | 1.3529 | 92 | 0.3212 | 0.6588 | 0.3212 |
+ | No log | 1.3824 | 94 | 0.3075 | 0.6459 | 0.3075 |
+ | No log | 1.4118 | 96 | 0.3146 | 0.6141 | 0.3146 |
+ | No log | 1.4412 | 98 | 0.3140 | 0.6051 | 0.3140 |
+ | No log | 1.4706 | 100 | 0.2968 | 0.6409 | 0.2968 |
+ | No log | 1.5 | 102 | 0.3146 | 0.6665 | 0.3146 |
+ | No log | 1.5294 | 104 | 0.3225 | 0.6744 | 0.3225 |
+ | No log | 1.5588 | 106 | 0.2963 | 0.6660 | 0.2963 |
+ | No log | 1.5882 | 108 | 0.3015 | 0.6202 | 0.3015 |
+ | No log | 1.6176 | 110 | 0.3128 | 0.6101 | 0.3128 |
+ | No log | 1.6471 | 112 | 0.2930 | 0.6835 | 0.2930 |
+ | No log | 1.6765 | 114 | 0.3211 | 0.7509 | 0.3211 |
+ | No log | 1.7059 | 116 | 0.3024 | 0.7304 | 0.3024 |
+ | No log | 1.7353 | 118 | 0.2830 | 0.6659 | 0.2830 |
+ | No log | 1.7647 | 120 | 0.2853 | 0.6455 | 0.2853 |
+ | No log | 1.7941 | 122 | 0.2959 | 0.7087 | 0.2959 |
+ | No log | 1.8235 | 124 | 0.3210 | 0.7212 | 0.3210 |
+ | No log | 1.8529 | 126 | 0.3687 | 0.7455 | 0.3687 |
+ | No log | 1.8824 | 128 | 0.3281 | 0.7077 | 0.3281 |
+ | No log | 1.9118 | 130 | 0.2932 | 0.6235 | 0.2932 |
+ | No log | 1.9412 | 132 | 0.3188 | 0.5858 | 0.3188 |
+ | No log | 1.9706 | 134 | 0.3395 | 0.5668 | 0.3395 |
+ | No log | 2.0 | 136 | 0.3031 | 0.5998 | 0.3031 |
+ | No log | 2.0294 | 138 | 0.2965 | 0.6165 | 0.2965 |
+ | No log | 2.0588 | 140 | 0.2870 | 0.6407 | 0.2870 |
+ | No log | 2.0882 | 142 | 0.2971 | 0.6951 | 0.2971 |
+ | No log | 2.1176 | 144 | 0.3088 | 0.7183 | 0.3088 |
+ | No log | 2.1471 | 146 | 0.2953 | 0.6786 | 0.2953 |
+ | No log | 2.1765 | 148 | 0.3026 | 0.6304 | 0.3026 |
+ | No log | 2.2059 | 150 | 0.2990 | 0.6499 | 0.2990 |
+ | No log | 2.2353 | 152 | 0.3100 | 0.6986 | 0.3100 |
+ | No log | 2.2647 | 154 | 0.3029 | 0.6558 | 0.3029 |
+ | No log | 2.2941 | 156 | 0.3094 | 0.6451 | 0.3094 |
+ | No log | 2.3235 | 158 | 0.3189 | 0.6789 | 0.3189 |
+ | No log | 2.3529 | 160 | 0.3296 | 0.7205 | 0.3296 |
+ | No log | 2.3824 | 162 | 0.3857 | 0.7668 | 0.3857 |
+ | No log | 2.4118 | 164 | 0.3847 | 0.7738 | 0.3847 |
+ | No log | 2.4412 | 166 | 0.3288 | 0.7196 | 0.3288 |
+ | No log | 2.4706 | 168 | 0.3127 | 0.6857 | 0.3127 |
+ | No log | 2.5 | 170 | 0.3142 | 0.6321 | 0.3142 |
+ | No log | 2.5294 | 172 | 0.2971 | 0.6805 | 0.2971 |
+ | No log | 2.5588 | 174 | 0.2919 | 0.6809 | 0.2919 |
+ | No log | 2.5882 | 176 | 0.2883 | 0.6832 | 0.2883 |
+ | No log | 2.6176 | 178 | 0.2908 | 0.7045 | 0.2908 |
+ | No log | 2.6471 | 180 | 0.3183 | 0.7436 | 0.3183 |
+ | No log | 2.6765 | 182 | 0.3840 | 0.7915 | 0.3840 |
+ | No log | 2.7059 | 184 | 0.4421 | 0.8013 | 0.4421 |
+ | No log | 2.7353 | 186 | 0.4693 | 0.8022 | 0.4693 |
+ | No log | 2.7647 | 188 | 0.3933 | 0.7893 | 0.3933 |
+ | No log | 2.7941 | 190 | 0.3167 | 0.7355 | 0.3167 |
+ | No log | 2.8235 | 192 | 0.3053 | 0.6908 | 0.3053 |
+ | No log | 2.8529 | 194 | 0.3028 | 0.6759 | 0.3028 |
+ | No log | 2.8824 | 196 | 0.2999 | 0.6618 | 0.2999 |
+ | No log | 2.9118 | 198 | 0.2966 | 0.6730 | 0.2966 |
+ | No log | 2.9412 | 200 | 0.3041 | 0.6986 | 0.3041 |
+ | No log | 2.9706 | 202 | 0.3492 | 0.7601 | 0.3492 |
+ | No log | 3.0 | 204 | 0.3807 | 0.7895 | 0.3807 |
+ | No log | 3.0294 | 206 | 0.3448 | 0.7616 | 0.3448 |
+ | No log | 3.0588 | 208 | 0.2938 | 0.7110 | 0.2938 |
+ | No log | 3.0882 | 210 | 0.2832 | 0.6748 | 0.2832 |
+ | No log | 3.1176 | 212 | 0.2984 | 0.6126 | 0.2984 |
+ | No log | 3.1471 | 214 | 0.3016 | 0.6126 | 0.3016 |
+ | No log | 3.1765 | 216 | 0.2831 | 0.6494 | 0.2831 |
+ | No log | 3.2059 | 218 | 0.2895 | 0.7158 | 0.2895 |
+ | No log | 3.2353 | 220 | 0.3130 | 0.7480 | 0.3130 |
+ | No log | 3.2647 | 222 | 0.3255 | 0.7594 | 0.3255 |
+ | No log | 3.2941 | 224 | 0.3160 | 0.7489 | 0.3160 |
+ | No log | 3.3235 | 226 | 0.3049 | 0.7209 | 0.3049 |
+ | No log | 3.3529 | 228 | 0.2995 | 0.7190 | 0.2995 |
+ | No log | 3.3824 | 230 | 0.3001 | 0.7290 | 0.3001 |
+ | No log | 3.4118 | 232 | 0.3108 | 0.7388 | 0.3108 |
+ | No log | 3.4412 | 234 | 0.3102 | 0.7363 | 0.3102 |
+ | No log | 3.4706 | 236 | 0.3025 | 0.7162 | 0.3025 |
+ | No log | 3.5 | 238 | 0.2998 | 0.7035 | 0.2998 |
+ | No log | 3.5294 | 240 | 0.3007 | 0.7119 | 0.3007 |
+ | No log | 3.5588 | 242 | 0.3111 | 0.7343 | 0.3111 |
+ | No log | 3.5882 | 244 | 0.3146 | 0.7327 | 0.3146 |
+ | No log | 3.6176 | 246 | 0.2978 | 0.6769 | 0.2978 |
+ | No log | 3.6471 | 248 | 0.3000 | 0.6276 | 0.3000 |
+ | No log | 3.6765 | 250 | 0.3051 | 0.6143 | 0.3051 |
+ | No log | 3.7059 | 252 | 0.3019 | 0.6351 | 0.3019 |
+ | No log | 3.7353 | 254 | 0.3106 | 0.7052 | 0.3106 |
+ | No log | 3.7647 | 256 | 0.3545 | 0.7642 | 0.3545 |
+ | No log | 3.7941 | 258 | 0.3953 | 0.7867 | 0.3953 |
+ | No log | 3.8235 | 260 | 0.3837 | 0.7747 | 0.3837 |
+ | No log | 3.8529 | 262 | 0.3480 | 0.7461 | 0.3480 |
+ | No log | 3.8824 | 264 | 0.3239 | 0.7146 | 0.3239 |
+ | No log | 3.9118 | 266 | 0.3220 | 0.6736 | 0.3220 |
+ | No log | 3.9412 | 268 | 0.3211 | 0.6705 | 0.3211 |
+ | No log | 3.9706 | 270 | 0.3251 | 0.7223 | 0.3251 |
+ | No log | 4.0 | 272 | 0.3272 | 0.7300 | 0.3272 |
+ | No log | 4.0294 | 274 | 0.3318 | 0.7415 | 0.3318 |
+ | No log | 4.0588 | 276 | 0.3398 | 0.7610 | 0.3398 |
+ | No log | 4.0882 | 278 | 0.3381 | 0.7626 | 0.3381 |
+ | No log | 4.1176 | 280 | 0.3233 | 0.7381 | 0.3233 |
+ | No log | 4.1471 | 282 | 0.3073 | 0.7105 | 0.3073 |
+ | No log | 4.1765 | 284 | 0.3043 | 0.6954 | 0.3043 |
+ | No log | 4.2059 | 286 | 0.3047 | 0.6717 | 0.3047 |
+ | No log | 4.2353 | 288 | 0.3047 | 0.6814 | 0.3047 |
+ | No log | 4.2647 | 290 | 0.3073 | 0.7041 | 0.3073 |
+ | No log | 4.2941 | 292 | 0.3091 | 0.7089 | 0.3091 |
+ | No log | 4.3235 | 294 | 0.3070 | 0.7084 | 0.3070 |
+ | No log | 4.3529 | 296 | 0.3104 | 0.7163 | 0.3104 |
+ | No log | 4.3824 | 298 | 0.3123 | 0.7197 | 0.3123 |
+ | No log | 4.4118 | 300 | 0.3140 | 0.7269 | 0.3140 |
+ | No log | 4.4412 | 302 | 0.3164 | 0.7296 | 0.3164 |
+ | No log | 4.4706 | 304 | 0.3152 | 0.7273 | 0.3152 |
+ | No log | 4.5 | 306 | 0.3091 | 0.7130 | 0.3091 |
+ | No log | 4.5294 | 308 | 0.3050 | 0.7072 | 0.3050 |
+ | No log | 4.5588 | 310 | 0.3048 | 0.7058 | 0.3048 |
+ | No log | 4.5882 | 312 | 0.3085 | 0.7116 | 0.3085 |
+ | No log | 4.6176 | 314 | 0.3132 | 0.7236 | 0.3132 |
+ | No log | 4.6471 | 316 | 0.3216 | 0.7357 | 0.3216 |
+ | No log | 4.6765 | 318 | 0.3286 | 0.7448 | 0.3286 |
+ | No log | 4.7059 | 320 | 0.3381 | 0.7533 | 0.3381 |
+ | No log | 4.7353 | 322 | 0.3398 | 0.7547 | 0.3398 |
+ | No log | 4.7647 | 324 | 0.3416 | 0.7532 | 0.3416 |
+ | No log | 4.7941 | 326 | 0.3410 | 0.7532 | 0.3410 |
+ | No log | 4.8235 | 328 | 0.3387 | 0.7493 | 0.3387 |
+ | No log | 4.8529 | 330 | 0.3357 | 0.7492 | 0.3357 |
+ | No log | 4.8824 | 332 | 0.3319 | 0.7451 | 0.3319 |
+ | No log | 4.9118 | 334 | 0.3279 | 0.7365 | 0.3279 |
+ | No log | 4.9412 | 336 | 0.3242 | 0.7330 | 0.3242 |
+ | No log | 4.9706 | 338 | 0.3222 | 0.7336 | 0.3222 |
+ | No log | 5.0 | 340 | 0.3214 | 0.7303 | 0.3214 |


  ### Framework versions
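
The runs/*/events.out.tfevents.* files added below are the TensorBoard event logs written during training; the evaluation curves tabulated above can be recovered from them with TensorBoard's Python API, roughly as sketched here. The tag names ("eval/...") follow the Trainer's usual logging convention and are an assumption.

```python
# Hedged sketch: read one run directory and print the logged eval metrics.
# The relative path and the "eval/" tag prefix are assumptions.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Aug25_12-06-23_358cdae836c1")  # point at a downloaded run directory
acc.Reload()

for tag in acc.Tags()["scalars"]:
    if tag.startswith("eval/"):
        for event in acc.Scalars(tag):
            print(f"step={event.step:4d}  {tag}={event.value:.4f}")
```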
runs/Aug25_05-41-07_358cdae836c1/events.out.tfevents.1724564469.358cdae836c1.24.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54bd7f5a51c0a8c302402644cc3c52ce03205351961b0370a259aa6df00f04b1
+ size 63971
runs/Aug25_06-04-04_358cdae836c1/events.out.tfevents.1724565845.358cdae836c1.24.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ad5fb86b74b5f089370092fefb7b1b99292be05c47bc64d4eecb231c03f9d19
+ size 63971
runs/Aug25_06-27-02_358cdae836c1/events.out.tfevents.1724567223.358cdae836c1.24.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de78c1ad7b5c9514a0365e804d73c17f286039708d330508569b805ca8f17210
+ size 63971
runs/Aug25_06-50-00_358cdae836c1/events.out.tfevents.1724568601.358cdae836c1.24.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:397aab7881cbdd8d469e36e3f452e7f53259711ed48bae070979de23f5f1cdda
+ size 63971
runs/Aug25_07-12-58_358cdae836c1/events.out.tfevents.1724569978.358cdae836c1.24.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e569a6a3ad18cd17034388837e72c2b1bdb335a57599e200481a497485daddd2
+ size 63971
runs/Aug25_07-35-55_358cdae836c1/events.out.tfevents.1724571356.358cdae836c1.24.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cddfa82f884a624c1d72048008d8746fc02a955c942a2400ec02411afbec77da
+ size 65796
runs/Aug25_07-59-55_358cdae836c1/events.out.tfevents.1724572795.358cdae836c1.24.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7bce665bb0a6fd0a50fa5ffa9737ce4a506630a6339c9bb2ece7dfbfc0e72070
+ size 65796
runs/Aug25_08-24-05_358cdae836c1/events.out.tfevents.1724574246.358cdae836c1.24.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96fb1e5eabcc8ecf6728578aed01c91aa6ec58eb97e6fe27026366b354c8813d
+ size 65796
runs/Aug25_08-48-17_358cdae836c1/events.out.tfevents.1724575698.358cdae836c1.24.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d3c92be3e664d9dcd4569d64608724fd7087cd24cec6357c35d497f924174e9
+ size 65796
runs/Aug25_09-12-24_358cdae836c1/events.out.tfevents.1724577145.358cdae836c1.24.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a019797beaa0af48ae48d7ab61416011727fb5808981e7f0fed40d3ad0a7fb71
+ size 65796
runs/Aug25_09-36-31_358cdae836c1/events.out.tfevents.1724578591.358cdae836c1.24.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2516d7baab235757fa638cae13a5652a97acb1f0d8f4a5511393f4e1481f39f
+ size 66891
runs/Aug25_10-01-23_358cdae836c1/events.out.tfevents.1724580084.358cdae836c1.24.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11814fd5c3edf1578a58be36481aee71aa8ac294b0ad1ab794115cce5eb3aa6a
+ size 66891
runs/Aug25_10-26-23_358cdae836c1/events.out.tfevents.1724581584.358cdae836c1.24.12 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f67a069fbafb8cd3f020cf5bcc276327a5e5603b187691d23629e5f998f628ab
+ size 66891
runs/Aug25_10-51-26_358cdae836c1/events.out.tfevents.1724583087.358cdae836c1.24.13 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2e96a063019ac9862054e6b5100a510818d6e2ebb474cf7edec685facc216f5
+ size 66891
runs/Aug25_11-16-26_358cdae836c1/events.out.tfevents.1724584587.358cdae836c1.24.14 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae2ff0e554421799f8852591a54c64303d895ac1ab8706bf59c3513c0a2db337
+ size 66891
runs/Aug25_11-41-27_358cdae836c1/events.out.tfevents.1724586087.358cdae836c1.24.15 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c2a68292ccdd7bc87166075e65bd60c653c82c86d74510b6a5386070a7b1202
+ size 66891
runs/Aug25_12-06-23_358cdae836c1/events.out.tfevents.1724587584.358cdae836c1.24.16 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a8008b63ded208d1212c20a2de4cacab83b818295b4ae038a8c99ab43afc0d3
+ size 66891
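
Each file above is stored as a Git LFS pointer rather than raw bytes: a version line, a sha256 oid, and the size in bytes. A small sketch of how such a pointer can be checked against a downloaded artifact is shown below; the file paths are placeholders.

```python
# Hedged sketch: verify a downloaded file against its Git LFS pointer (paths are placeholders).
import hashlib
import os

def parse_lfs_pointer(pointer_path: str) -> dict:
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields  # e.g. {"version": "...", "oid": "sha256:...", "size": "66891"}

def matches_pointer(artifact_path: str, pointer: dict) -> bool:
    digest = hashlib.sha256(open(artifact_path, "rb").read()).hexdigest()
    return (pointer["oid"] == f"sha256:{digest}"
            and int(pointer["size"]) == os.path.getsize(artifact_path))

# pointer = parse_lfs_pointer("pointer.txt")
# print(matches_pointer("events.out.tfevents", pointer))
```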
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1c3fd05d86cc67f7ead432fff199362b669fc25721769eac87e3cfcec86f61d5
+ oid sha256:229efc4b8cd0f76ac32ba3f04b354d0456f90a842c333b5a4d7743bd423b147d
  size 5176
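
training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside each checkpoint, which is why only its hash changes between runs while the size stays the same. A hedged sketch of inspecting it locally (it must be loaded with a compatible transformers version):

```python
# Hedged sketch: training_args.bin is written with torch.save, so it can be loaded back
# with torch.load to inspect the exact hyperparameters. weights_only=False is required
# because it is a pickled object, so only load files from sources you trust.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # usually TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
print(args.eval_steps, args.save_steps, args.push_to_hub)
```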