omidvaramin committed
Commit b2db47d
1 Parent(s): ac9e443

Upload 7 files

rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:829ebc0229f1e4d39bc20ac215c733d584423afe2cc353d38cafe1dbab57265f
+ size 14503
scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe0e942371816058df6777e5346fd4a0fbcfe7adb65796d68edd0eb05192fabe
+ size 559
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:133bfb3bce707ce411020620376efdf273dce0a70aafa560febf440782055ac1
+ size 623
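
The three files above are stored through Git LFS, so the diff shows only pointer stubs: a spec version, a sha256 oid, and the payload size; `git lfs pull` (or the Hub's resolve endpoint) swaps them for the real binaries on checkout. Below is a minimal sketch of verifying a downloaded binary against such a pointer; the helper name and the local path are illustrative, not part of the commit.

```python
import hashlib
import os

def matches_lfs_pointer(pointer_text: str, local_path: str) -> bool:
    """Check a local file against the oid/size recorded in a Git LFS pointer."""
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    expected_oid = fields["oid"].split(":", 1)[1]   # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    with open(local_path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)

    return (digest.hexdigest() == expected_oid
            and os.path.getsize(local_path) == expected_size)

# Pointer text copied from the scheduler.pt entry above; the local path is hypothetical.
pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:133bfb3bce707ce411020620376efdf273dce0a70aafa560febf440782055ac1\n"
    "size 623\n"
)
if os.path.exists("scheduler.pt"):
    print(matches_lfs_pointer(pointer, "scheduler.pt"))
```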
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[SEP]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
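
In the map above, cls_token is bound to "[SEP]" rather than a dedicated [CLS] symbol, presumably inherited from the microsoft/prophetnet-large-uncased-cnndm tokenizer this checkpoint was fine-tuned from. A short sketch of reading the file directly; the checkpoint path is an assumption:

```python
import json

# Hypothetical local path to the uploaded checkpoint files.
with open("checkpoint-97402/special_tokens_map.json", encoding="utf-8") as fh:
    special_tokens = json.load(fh)

for name, token in sorted(special_tokens.items()):
    print(f"{name:12} -> {token}")   # e.g. cls_token -> [SEP], mask_token -> [MASK]
```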
tokenizer_config.json ADDED
@@ -0,0 +1,19 @@
+ {
+ "cls_token": "[SEP]",
+ "do_basic_tokenize": true,
+ "do_lower_case": true,
+ "full_tokenizer_file": null,
+ "mask_token": "[MASK]",
+ "name_or_path": "microsoft/prophetnet-large-uncased-cnndm",
+ "never_split": null,
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "special_tokens_map_file": null,
+ "strip_accents": null,
+ "tokenize_chinese_chars": true,
+ "tokenizer_class": "ProphetNetTokenizer",
+ "tokenizer_file": null,
+ "unk_token": "[UNK]",
+ "x_sep_token": "[X_SEP]",
+ "xprophetnet_tokenizer": false
+ }
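
With tokenizer_class set to ProphetNetTokenizer and do_lower_case true, the checkpoint directory loads like any other Hugging Face Transformers tokenizer. A minimal sketch, assuming transformers is installed and the directory also contains the ProphetNet vocabulary file saved with the rest of the checkpoint (the vocabulary itself is not among the seven files in this commit); the path mirrors best_model_checkpoint from trainer_state.json below:

```python
from transformers import AutoTokenizer

# Local checkpoint directory; adjust to wherever these files were downloaded.
ckpt_dir = "results/models/prophetnet-large-uncased-cnndm-NewsRoom/checkpoint-97402"

tokenizer = AutoTokenizer.from_pretrained(ckpt_dir)   # resolves to ProphetNetTokenizer
enc = tokenizer("police arrest suspect after downtown chase", return_tensors="pt")

print(type(tokenizer).__name__)    # ProphetNetTokenizer
print(enc["input_ids"].shape)      # (1, sequence_length)
```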
trainer_state.json ADDED
@@ -0,0 +1,1206 @@
1
+ {
2
+ "best_metric": 2.340223550796509,
3
+ "best_model_checkpoint": "results/models/prophetnet-large-uncased-cnndm-NewsRoom/checkpoint-97402",
4
+ "epoch": 2.0,
5
+ "global_step": 97402,
6
+ "is_hyper_param_search": false,
7
+ "is_local_process_zero": true,
8
+ "is_world_process_zero": true,
9
+ "log_history": [
10
+ {
11
+ "epoch": 0.01,
12
+ "learning_rate": 1.9948871686413012e-05,
13
+ "loss": 3.115,
14
+ "step": 500
15
+ },
16
+ {
17
+ "epoch": 0.02,
18
+ "learning_rate": 1.9897640705529663e-05,
19
+ "loss": 2.9868,
20
+ "step": 1000
21
+ },
22
+ {
23
+ "epoch": 0.03,
24
+ "learning_rate": 1.9846307057349954e-05,
25
+ "loss": 2.9327,
26
+ "step": 1500
27
+ },
28
+ {
29
+ "epoch": 0.04,
30
+ "learning_rate": 1.9794973409170245e-05,
31
+ "loss": 2.9211,
32
+ "step": 2000
33
+ },
34
+ {
35
+ "epoch": 0.05,
36
+ "learning_rate": 1.9743639760990536e-05,
37
+ "loss": 2.9204,
38
+ "step": 2500
39
+ },
40
+ {
41
+ "epoch": 0.06,
42
+ "learning_rate": 1.9692306112810827e-05,
43
+ "loss": 2.8705,
44
+ "step": 3000
45
+ },
46
+ {
47
+ "epoch": 0.07,
48
+ "learning_rate": 1.9640972464631118e-05,
49
+ "loss": 2.8643,
50
+ "step": 3500
51
+ },
52
+ {
53
+ "epoch": 0.08,
54
+ "learning_rate": 1.958974148374777e-05,
55
+ "loss": 2.8546,
56
+ "step": 4000
57
+ },
58
+ {
59
+ "epoch": 0.09,
60
+ "learning_rate": 1.953840783556806e-05,
61
+ "loss": 2.8215,
62
+ "step": 4500
63
+ },
64
+ {
65
+ "epoch": 0.1,
66
+ "learning_rate": 1.948707418738835e-05,
67
+ "loss": 2.8266,
68
+ "step": 5000
69
+ },
70
+ {
71
+ "epoch": 0.11,
72
+ "learning_rate": 1.9435740539208642e-05,
73
+ "loss": 2.7956,
74
+ "step": 5500
75
+ },
76
+ {
77
+ "epoch": 0.12,
78
+ "learning_rate": 1.9384509558325293e-05,
79
+ "loss": 2.807,
80
+ "step": 6000
81
+ },
82
+ {
83
+ "epoch": 0.13,
84
+ "learning_rate": 1.9333175910145584e-05,
85
+ "loss": 2.8149,
86
+ "step": 6500
87
+ },
88
+ {
89
+ "epoch": 0.14,
90
+ "learning_rate": 1.9281842261965875e-05,
91
+ "loss": 2.8123,
92
+ "step": 7000
93
+ },
94
+ {
95
+ "epoch": 0.15,
96
+ "learning_rate": 1.9230508613786166e-05,
97
+ "loss": 2.7988,
98
+ "step": 7500
99
+ },
100
+ {
101
+ "epoch": 0.16,
102
+ "learning_rate": 1.9179174965606457e-05,
103
+ "loss": 2.7968,
104
+ "step": 8000
105
+ },
106
+ {
107
+ "epoch": 0.17,
108
+ "learning_rate": 1.9128046652019467e-05,
109
+ "loss": 2.8055,
110
+ "step": 8500
111
+ },
112
+ {
113
+ "epoch": 0.18,
114
+ "learning_rate": 1.9076815671136118e-05,
115
+ "loss": 2.7762,
116
+ "step": 9000
117
+ },
118
+ {
119
+ "epoch": 0.2,
120
+ "learning_rate": 1.902548202295641e-05,
121
+ "loss": 2.7965,
122
+ "step": 9500
123
+ },
124
+ {
125
+ "epoch": 0.21,
126
+ "learning_rate": 1.89741483747767e-05,
127
+ "loss": 2.7588,
128
+ "step": 10000
129
+ },
130
+ {
131
+ "epoch": 0.22,
132
+ "learning_rate": 1.892281472659699e-05,
133
+ "loss": 2.7493,
134
+ "step": 10500
135
+ },
136
+ {
137
+ "epoch": 0.23,
138
+ "learning_rate": 1.8871481078417282e-05,
139
+ "loss": 2.741,
140
+ "step": 11000
141
+ },
142
+ {
143
+ "epoch": 0.24,
144
+ "learning_rate": 1.8820147430237573e-05,
145
+ "loss": 2.7593,
146
+ "step": 11500
147
+ },
148
+ {
149
+ "epoch": 0.25,
150
+ "learning_rate": 1.8768813782057864e-05,
151
+ "loss": 2.7392,
152
+ "step": 12000
153
+ },
154
+ {
155
+ "epoch": 0.26,
156
+ "learning_rate": 1.8717685468470875e-05,
157
+ "loss": 2.7177,
158
+ "step": 12500
159
+ },
160
+ {
161
+ "epoch": 0.27,
162
+ "learning_rate": 1.8666351820291166e-05,
163
+ "loss": 2.726,
164
+ "step": 13000
165
+ },
166
+ {
167
+ "epoch": 0.28,
168
+ "learning_rate": 1.8615018172111457e-05,
169
+ "loss": 2.7165,
170
+ "step": 13500
171
+ },
172
+ {
173
+ "epoch": 0.29,
174
+ "learning_rate": 1.8563684523931748e-05,
175
+ "loss": 2.7333,
176
+ "step": 14000
177
+ },
178
+ {
179
+ "epoch": 0.3,
180
+ "learning_rate": 1.851235087575204e-05,
181
+ "loss": 2.725,
182
+ "step": 14500
183
+ },
184
+ {
185
+ "epoch": 0.31,
186
+ "learning_rate": 1.846101722757233e-05,
187
+ "loss": 2.728,
188
+ "step": 15000
189
+ },
190
+ {
191
+ "epoch": 0.32,
192
+ "learning_rate": 1.840968357939262e-05,
193
+ "loss": 2.7335,
194
+ "step": 15500
195
+ },
196
+ {
197
+ "epoch": 0.33,
198
+ "learning_rate": 1.8358349931212912e-05,
199
+ "loss": 2.7167,
200
+ "step": 16000
201
+ },
202
+ {
203
+ "epoch": 0.34,
204
+ "learning_rate": 1.8307016283033203e-05,
205
+ "loss": 2.7273,
206
+ "step": 16500
207
+ },
208
+ {
209
+ "epoch": 0.35,
210
+ "learning_rate": 1.8255682634853494e-05,
211
+ "loss": 2.6938,
212
+ "step": 17000
213
+ },
214
+ {
215
+ "epoch": 0.36,
216
+ "learning_rate": 1.8204348986673785e-05,
217
+ "loss": 2.7068,
218
+ "step": 17500
219
+ },
220
+ {
221
+ "epoch": 0.37,
222
+ "learning_rate": 1.8153015338494076e-05,
223
+ "loss": 2.7108,
224
+ "step": 18000
225
+ },
226
+ {
227
+ "epoch": 0.38,
228
+ "learning_rate": 1.8101681690314367e-05,
229
+ "loss": 2.6894,
230
+ "step": 18500
231
+ },
232
+ {
233
+ "epoch": 0.39,
234
+ "learning_rate": 1.805045070943102e-05,
235
+ "loss": 2.7194,
236
+ "step": 19000
237
+ },
238
+ {
239
+ "epoch": 0.4,
240
+ "learning_rate": 1.799911706125131e-05,
241
+ "loss": 2.6978,
242
+ "step": 19500
243
+ },
244
+ {
245
+ "epoch": 0.41,
246
+ "learning_rate": 1.7947783413071603e-05,
247
+ "loss": 2.6842,
248
+ "step": 20000
249
+ },
250
+ {
251
+ "epoch": 0.42,
252
+ "learning_rate": 1.789644976489189e-05,
253
+ "loss": 2.7017,
254
+ "step": 20500
255
+ },
256
+ {
257
+ "epoch": 0.43,
258
+ "learning_rate": 1.7845116116712185e-05,
259
+ "loss": 2.6924,
260
+ "step": 21000
261
+ },
262
+ {
263
+ "epoch": 0.44,
264
+ "learning_rate": 1.7793782468532473e-05,
265
+ "loss": 2.7021,
266
+ "step": 21500
267
+ },
268
+ {
269
+ "epoch": 0.45,
270
+ "learning_rate": 1.7742551487649127e-05,
271
+ "loss": 2.6904,
272
+ "step": 22000
273
+ },
274
+ {
275
+ "epoch": 0.46,
276
+ "learning_rate": 1.7691217839469418e-05,
277
+ "loss": 2.6983,
278
+ "step": 22500
279
+ },
280
+ {
281
+ "epoch": 0.47,
282
+ "learning_rate": 1.7639986858586066e-05,
283
+ "loss": 2.7012,
284
+ "step": 23000
285
+ },
286
+ {
287
+ "epoch": 0.48,
288
+ "learning_rate": 1.758865321040636e-05,
289
+ "loss": 2.6839,
290
+ "step": 23500
291
+ },
292
+ {
293
+ "epoch": 0.49,
294
+ "learning_rate": 1.7537319562226648e-05,
295
+ "loss": 2.6706,
296
+ "step": 24000
297
+ },
298
+ {
299
+ "epoch": 0.5,
300
+ "learning_rate": 1.7486088581343302e-05,
301
+ "loss": 2.6806,
302
+ "step": 24500
303
+ },
304
+ {
305
+ "epoch": 0.51,
306
+ "learning_rate": 1.7434857600459953e-05,
307
+ "loss": 2.6741,
308
+ "step": 25000
309
+ },
310
+ {
311
+ "epoch": 0.52,
312
+ "learning_rate": 1.738352395228024e-05,
313
+ "loss": 2.6666,
314
+ "step": 25500
315
+ },
316
+ {
317
+ "epoch": 0.53,
318
+ "learning_rate": 1.7332190304100535e-05,
319
+ "loss": 2.6669,
320
+ "step": 26000
321
+ },
322
+ {
323
+ "epoch": 0.54,
324
+ "learning_rate": 1.7280856655920822e-05,
325
+ "loss": 2.6774,
326
+ "step": 26500
327
+ },
328
+ {
329
+ "epoch": 0.55,
330
+ "learning_rate": 1.7229523007741117e-05,
331
+ "loss": 2.6755,
332
+ "step": 27000
333
+ },
334
+ {
335
+ "epoch": 0.56,
336
+ "learning_rate": 1.7178189359561404e-05,
337
+ "loss": 2.6688,
338
+ "step": 27500
339
+ },
340
+ {
341
+ "epoch": 0.57,
342
+ "learning_rate": 1.71268557113817e-05,
343
+ "loss": 2.6464,
344
+ "step": 28000
345
+ },
346
+ {
347
+ "epoch": 0.59,
348
+ "learning_rate": 1.7075522063201986e-05,
349
+ "loss": 2.6501,
350
+ "step": 28500
351
+ },
352
+ {
353
+ "epoch": 0.6,
354
+ "learning_rate": 1.702418841502228e-05,
355
+ "loss": 2.6459,
356
+ "step": 29000
357
+ },
358
+ {
359
+ "epoch": 0.61,
360
+ "learning_rate": 1.6972854766842572e-05,
361
+ "loss": 2.6579,
362
+ "step": 29500
363
+ },
364
+ {
365
+ "epoch": 0.62,
366
+ "learning_rate": 1.6921521118662863e-05,
367
+ "loss": 2.6277,
368
+ "step": 30000
369
+ },
370
+ {
371
+ "epoch": 0.63,
372
+ "learning_rate": 1.6870187470483154e-05,
373
+ "loss": 2.6606,
374
+ "step": 30500
375
+ },
376
+ {
377
+ "epoch": 0.64,
378
+ "learning_rate": 1.6818853822303445e-05,
379
+ "loss": 2.6642,
380
+ "step": 31000
381
+ },
382
+ {
383
+ "epoch": 0.65,
384
+ "learning_rate": 1.6767520174123736e-05,
385
+ "loss": 2.6157,
386
+ "step": 31500
387
+ },
388
+ {
389
+ "epoch": 0.66,
390
+ "learning_rate": 1.6716186525944027e-05,
391
+ "loss": 2.6221,
392
+ "step": 32000
393
+ },
394
+ {
395
+ "epoch": 0.67,
396
+ "learning_rate": 1.6664852877764318e-05,
397
+ "loss": 2.6417,
398
+ "step": 32500
399
+ },
400
+ {
401
+ "epoch": 0.68,
402
+ "learning_rate": 1.661351922958461e-05,
403
+ "loss": 2.647,
404
+ "step": 33000
405
+ },
406
+ {
407
+ "epoch": 0.69,
408
+ "learning_rate": 1.656228824870126e-05,
409
+ "loss": 2.6216,
410
+ "step": 33500
411
+ },
412
+ {
413
+ "epoch": 0.7,
414
+ "learning_rate": 1.651095460052155e-05,
415
+ "loss": 2.6208,
416
+ "step": 34000
417
+ },
418
+ {
419
+ "epoch": 0.71,
420
+ "learning_rate": 1.6459620952341842e-05,
421
+ "loss": 2.6175,
422
+ "step": 34500
423
+ },
424
+ {
425
+ "epoch": 0.72,
426
+ "learning_rate": 1.6408287304162133e-05,
427
+ "loss": 2.621,
428
+ "step": 35000
429
+ },
430
+ {
431
+ "epoch": 0.73,
432
+ "learning_rate": 1.6357056323278784e-05,
433
+ "loss": 2.6139,
434
+ "step": 35500
435
+ },
436
+ {
437
+ "epoch": 0.74,
438
+ "learning_rate": 1.6305722675099075e-05,
439
+ "loss": 2.6424,
440
+ "step": 36000
441
+ },
442
+ {
443
+ "epoch": 0.75,
444
+ "learning_rate": 1.6254389026919366e-05,
445
+ "loss": 2.6405,
446
+ "step": 36500
447
+ },
448
+ {
449
+ "epoch": 0.76,
450
+ "learning_rate": 1.6203055378739657e-05,
451
+ "loss": 2.5977,
452
+ "step": 37000
453
+ },
454
+ {
455
+ "epoch": 0.77,
456
+ "learning_rate": 1.6151721730559948e-05,
457
+ "loss": 2.6184,
458
+ "step": 37500
459
+ },
460
+ {
461
+ "epoch": 0.78,
462
+ "learning_rate": 1.6100388082380242e-05,
463
+ "loss": 2.6256,
464
+ "step": 38000
465
+ },
466
+ {
467
+ "epoch": 0.79,
468
+ "learning_rate": 1.604905443420053e-05,
469
+ "loss": 2.6289,
470
+ "step": 38500
471
+ },
472
+ {
473
+ "epoch": 0.8,
474
+ "learning_rate": 1.5997720786020824e-05,
475
+ "loss": 2.6286,
476
+ "step": 39000
477
+ },
478
+ {
479
+ "epoch": 0.81,
480
+ "learning_rate": 1.594659247243383e-05,
481
+ "loss": 2.5953,
482
+ "step": 39500
483
+ },
484
+ {
485
+ "epoch": 0.82,
486
+ "learning_rate": 1.5895258824254122e-05,
487
+ "loss": 2.6369,
488
+ "step": 40000
489
+ },
490
+ {
491
+ "epoch": 0.83,
492
+ "learning_rate": 1.5843925176074413e-05,
493
+ "loss": 2.6003,
494
+ "step": 40500
495
+ },
496
+ {
497
+ "epoch": 0.84,
498
+ "learning_rate": 1.5792591527894704e-05,
499
+ "loss": 2.6032,
500
+ "step": 41000
501
+ },
502
+ {
503
+ "epoch": 0.85,
504
+ "learning_rate": 1.5741257879715e-05,
505
+ "loss": 2.6312,
506
+ "step": 41500
507
+ },
508
+ {
509
+ "epoch": 0.86,
510
+ "learning_rate": 1.5689924231535287e-05,
511
+ "loss": 2.6386,
512
+ "step": 42000
513
+ },
514
+ {
515
+ "epoch": 0.87,
516
+ "learning_rate": 1.563859058335558e-05,
517
+ "loss": 2.6112,
518
+ "step": 42500
519
+ },
520
+ {
521
+ "epoch": 0.88,
522
+ "learning_rate": 1.558725693517587e-05,
523
+ "loss": 2.6156,
524
+ "step": 43000
525
+ },
526
+ {
527
+ "epoch": 0.89,
528
+ "learning_rate": 1.5535923286996163e-05,
529
+ "loss": 2.5918,
530
+ "step": 43500
531
+ },
532
+ {
533
+ "epoch": 0.9,
534
+ "learning_rate": 1.548458963881645e-05,
535
+ "loss": 2.6063,
536
+ "step": 44000
537
+ },
538
+ {
539
+ "epoch": 0.91,
540
+ "learning_rate": 1.5433358657933105e-05,
541
+ "loss": 2.6263,
542
+ "step": 44500
543
+ },
544
+ {
545
+ "epoch": 0.92,
546
+ "learning_rate": 1.5382025009753392e-05,
547
+ "loss": 2.6083,
548
+ "step": 45000
549
+ },
550
+ {
551
+ "epoch": 0.93,
552
+ "learning_rate": 1.5330691361573687e-05,
553
+ "loss": 2.602,
554
+ "step": 45500
555
+ },
556
+ {
557
+ "epoch": 0.94,
558
+ "learning_rate": 1.5279357713393978e-05,
559
+ "loss": 2.5989,
560
+ "step": 46000
561
+ },
562
+ {
563
+ "epoch": 0.95,
564
+ "learning_rate": 1.5228024065214269e-05,
565
+ "loss": 2.5898,
566
+ "step": 46500
567
+ },
568
+ {
569
+ "epoch": 0.97,
570
+ "learning_rate": 1.5176690417034558e-05,
571
+ "loss": 2.6033,
572
+ "step": 47000
573
+ },
574
+ {
575
+ "epoch": 0.98,
576
+ "learning_rate": 1.5125356768854851e-05,
577
+ "loss": 2.5861,
578
+ "step": 47500
579
+ },
580
+ {
581
+ "epoch": 0.99,
582
+ "learning_rate": 1.50741257879715e-05,
583
+ "loss": 2.5959,
584
+ "step": 48000
585
+ },
586
+ {
587
+ "epoch": 1.0,
588
+ "learning_rate": 1.5022792139791791e-05,
589
+ "loss": 2.6006,
590
+ "step": 48500
591
+ },
592
+ {
593
+ "epoch": 1.0,
594
+ "eval_gen_len": 13.17,
595
+ "eval_loss": 2.431262493133545,
596
+ "eval_rouge1": 36.6311,
597
+ "eval_rouge2": 19.2397,
598
+ "eval_rougeL": 34.5043,
599
+ "eval_rougeLsum": 34.9589,
600
+ "eval_runtime": 12.9254,
601
+ "eval_samples_per_second": 7.737,
602
+ "eval_steps_per_second": 0.542,
603
+ "step": 48701
604
+ },
605
+ {
606
+ "epoch": 1.01,
607
+ "learning_rate": 1.4971458491612084e-05,
608
+ "loss": 2.4662,
609
+ "step": 49000
610
+ },
611
+ {
612
+ "epoch": 1.02,
613
+ "learning_rate": 1.4920124843432375e-05,
614
+ "loss": 2.3465,
615
+ "step": 49500
616
+ },
617
+ {
618
+ "epoch": 1.03,
619
+ "learning_rate": 1.4868791195252666e-05,
620
+ "loss": 2.3791,
621
+ "step": 50000
622
+ },
623
+ {
624
+ "epoch": 1.04,
625
+ "learning_rate": 1.4817457547072957e-05,
626
+ "loss": 2.3492,
627
+ "step": 50500
628
+ },
629
+ {
630
+ "epoch": 1.05,
631
+ "learning_rate": 1.4766123898893248e-05,
632
+ "loss": 2.3735,
633
+ "step": 51000
634
+ },
635
+ {
636
+ "epoch": 1.06,
637
+ "learning_rate": 1.471479025071354e-05,
638
+ "loss": 2.3669,
639
+ "step": 51500
640
+ },
641
+ {
642
+ "epoch": 1.07,
643
+ "learning_rate": 1.466345660253383e-05,
644
+ "loss": 2.3797,
645
+ "step": 52000
646
+ },
647
+ {
648
+ "epoch": 1.08,
649
+ "learning_rate": 1.4612122954354123e-05,
650
+ "loss": 2.3744,
651
+ "step": 52500
652
+ },
653
+ {
654
+ "epoch": 1.09,
655
+ "learning_rate": 1.4560891973470772e-05,
656
+ "loss": 2.3798,
657
+ "step": 53000
658
+ },
659
+ {
660
+ "epoch": 1.1,
661
+ "learning_rate": 1.4509558325291063e-05,
662
+ "loss": 2.3719,
663
+ "step": 53500
664
+ },
665
+ {
666
+ "epoch": 1.11,
667
+ "learning_rate": 1.4458224677111354e-05,
668
+ "loss": 2.3724,
669
+ "step": 54000
670
+ },
671
+ {
672
+ "epoch": 1.12,
673
+ "learning_rate": 1.4406993696228005e-05,
674
+ "loss": 2.3723,
675
+ "step": 54500
676
+ },
677
+ {
678
+ "epoch": 1.13,
679
+ "learning_rate": 1.4355660048048297e-05,
680
+ "loss": 2.3588,
681
+ "step": 55000
682
+ },
683
+ {
684
+ "epoch": 1.14,
685
+ "learning_rate": 1.4304326399868587e-05,
686
+ "loss": 2.3719,
687
+ "step": 55500
688
+ },
689
+ {
690
+ "epoch": 1.15,
691
+ "learning_rate": 1.4253095418985237e-05,
692
+ "loss": 2.3747,
693
+ "step": 56000
694
+ },
695
+ {
696
+ "epoch": 1.16,
697
+ "learning_rate": 1.4201761770805528e-05,
698
+ "loss": 2.3708,
699
+ "step": 56500
700
+ },
701
+ {
702
+ "epoch": 1.17,
703
+ "learning_rate": 1.415042812262582e-05,
704
+ "loss": 2.3665,
705
+ "step": 57000
706
+ },
707
+ {
708
+ "epoch": 1.18,
709
+ "learning_rate": 1.409909447444611e-05,
710
+ "loss": 2.3773,
711
+ "step": 57500
712
+ },
713
+ {
714
+ "epoch": 1.19,
715
+ "learning_rate": 1.4047760826266402e-05,
716
+ "loss": 2.3692,
717
+ "step": 58000
718
+ },
719
+ {
720
+ "epoch": 1.2,
721
+ "learning_rate": 1.3996427178086693e-05,
722
+ "loss": 2.3853,
723
+ "step": 58500
724
+ },
725
+ {
726
+ "epoch": 1.21,
727
+ "learning_rate": 1.3945093529906985e-05,
728
+ "loss": 2.3764,
729
+ "step": 59000
730
+ },
731
+ {
732
+ "epoch": 1.22,
733
+ "learning_rate": 1.3893759881727276e-05,
734
+ "loss": 2.3565,
735
+ "step": 59500
736
+ },
737
+ {
738
+ "epoch": 1.23,
739
+ "learning_rate": 1.3842426233547567e-05,
740
+ "loss": 2.3514,
741
+ "step": 60000
742
+ },
743
+ {
744
+ "epoch": 1.24,
745
+ "learning_rate": 1.3791092585367858e-05,
746
+ "loss": 2.3657,
747
+ "step": 60500
748
+ },
749
+ {
750
+ "epoch": 1.25,
751
+ "learning_rate": 1.373975893718815e-05,
752
+ "loss": 2.3637,
753
+ "step": 61000
754
+ },
755
+ {
756
+ "epoch": 1.26,
757
+ "learning_rate": 1.368842528900844e-05,
758
+ "loss": 2.3653,
759
+ "step": 61500
760
+ },
761
+ {
762
+ "epoch": 1.27,
763
+ "learning_rate": 1.363719430812509e-05,
764
+ "loss": 2.3765,
765
+ "step": 62000
766
+ },
767
+ {
768
+ "epoch": 1.28,
769
+ "learning_rate": 1.3585860659945382e-05,
770
+ "loss": 2.3799,
771
+ "step": 62500
772
+ },
773
+ {
774
+ "epoch": 1.29,
775
+ "learning_rate": 1.3534527011765672e-05,
776
+ "loss": 2.3681,
777
+ "step": 63000
778
+ },
779
+ {
780
+ "epoch": 1.3,
781
+ "learning_rate": 1.3483193363585964e-05,
782
+ "loss": 2.3751,
783
+ "step": 63500
784
+ },
785
+ {
786
+ "epoch": 1.31,
787
+ "learning_rate": 1.3431859715406255e-05,
788
+ "loss": 2.3787,
789
+ "step": 64000
790
+ },
791
+ {
792
+ "epoch": 1.32,
793
+ "learning_rate": 1.3380526067226546e-05,
794
+ "loss": 2.3769,
795
+ "step": 64500
796
+ },
797
+ {
798
+ "epoch": 1.33,
799
+ "learning_rate": 1.3329192419046837e-05,
800
+ "loss": 2.3727,
801
+ "step": 65000
802
+ },
803
+ {
804
+ "epoch": 1.34,
805
+ "learning_rate": 1.327796143816349e-05,
806
+ "loss": 2.3642,
807
+ "step": 65500
808
+ },
809
+ {
810
+ "epoch": 1.36,
811
+ "learning_rate": 1.322662778998378e-05,
812
+ "loss": 2.3593,
813
+ "step": 66000
814
+ },
815
+ {
816
+ "epoch": 1.37,
817
+ "learning_rate": 1.317539680910043e-05,
818
+ "loss": 2.3817,
819
+ "step": 66500
820
+ },
821
+ {
822
+ "epoch": 1.38,
823
+ "learning_rate": 1.3124063160920721e-05,
824
+ "loss": 2.3743,
825
+ "step": 67000
826
+ },
827
+ {
828
+ "epoch": 1.39,
829
+ "learning_rate": 1.3072729512741014e-05,
830
+ "loss": 2.402,
831
+ "step": 67500
832
+ },
833
+ {
834
+ "epoch": 1.4,
835
+ "learning_rate": 1.3021395864561303e-05,
836
+ "loss": 2.3731,
837
+ "step": 68000
838
+ },
839
+ {
840
+ "epoch": 1.41,
841
+ "learning_rate": 1.2970062216381596e-05,
842
+ "loss": 2.3722,
843
+ "step": 68500
844
+ },
845
+ {
846
+ "epoch": 1.42,
847
+ "learning_rate": 1.2918728568201885e-05,
848
+ "loss": 2.3703,
849
+ "step": 69000
850
+ },
851
+ {
852
+ "epoch": 1.43,
853
+ "learning_rate": 1.2867394920022178e-05,
854
+ "loss": 2.3565,
855
+ "step": 69500
856
+ },
857
+ {
858
+ "epoch": 1.44,
859
+ "learning_rate": 1.2816061271842469e-05,
860
+ "loss": 2.3842,
861
+ "step": 70000
862
+ },
863
+ {
864
+ "epoch": 1.45,
865
+ "learning_rate": 1.276472762366276e-05,
866
+ "loss": 2.3783,
867
+ "step": 70500
868
+ },
869
+ {
870
+ "epoch": 1.46,
871
+ "learning_rate": 1.2713393975483051e-05,
872
+ "loss": 2.3662,
873
+ "step": 71000
874
+ },
875
+ {
876
+ "epoch": 1.47,
877
+ "learning_rate": 1.2662060327303342e-05,
878
+ "loss": 2.3613,
879
+ "step": 71500
880
+ },
881
+ {
882
+ "epoch": 1.48,
883
+ "learning_rate": 1.2610726679123633e-05,
884
+ "loss": 2.3799,
885
+ "step": 72000
886
+ },
887
+ {
888
+ "epoch": 1.49,
889
+ "learning_rate": 1.2559495698240282e-05,
890
+ "loss": 2.3457,
891
+ "step": 72500
892
+ },
893
+ {
894
+ "epoch": 1.5,
895
+ "learning_rate": 1.2508162050060575e-05,
896
+ "loss": 2.3773,
897
+ "step": 73000
898
+ },
899
+ {
900
+ "epoch": 1.51,
901
+ "learning_rate": 1.2456931069177226e-05,
902
+ "loss": 2.3736,
903
+ "step": 73500
904
+ },
905
+ {
906
+ "epoch": 1.52,
907
+ "learning_rate": 1.2405597420997517e-05,
908
+ "loss": 2.3625,
909
+ "step": 74000
910
+ },
911
+ {
912
+ "epoch": 1.53,
913
+ "learning_rate": 1.2354263772817808e-05,
914
+ "loss": 2.3818,
915
+ "step": 74500
916
+ },
917
+ {
918
+ "epoch": 1.54,
919
+ "learning_rate": 1.2302930124638099e-05,
920
+ "loss": 2.3538,
921
+ "step": 75000
922
+ },
923
+ {
924
+ "epoch": 1.55,
925
+ "learning_rate": 1.225159647645839e-05,
926
+ "loss": 2.381,
927
+ "step": 75500
928
+ },
929
+ {
930
+ "epoch": 1.56,
931
+ "learning_rate": 1.2200262828278682e-05,
932
+ "loss": 2.3648,
933
+ "step": 76000
934
+ },
935
+ {
936
+ "epoch": 1.57,
937
+ "learning_rate": 1.2148929180098972e-05,
938
+ "loss": 2.3679,
939
+ "step": 76500
940
+ },
941
+ {
942
+ "epoch": 1.58,
943
+ "learning_rate": 1.2097595531919265e-05,
944
+ "loss": 2.3437,
945
+ "step": 77000
946
+ },
947
+ {
948
+ "epoch": 1.59,
949
+ "learning_rate": 1.2046364551035914e-05,
950
+ "loss": 2.36,
951
+ "step": 77500
952
+ },
953
+ {
954
+ "epoch": 1.6,
955
+ "learning_rate": 1.1995030902856206e-05,
956
+ "loss": 2.3563,
957
+ "step": 78000
958
+ },
959
+ {
960
+ "epoch": 1.61,
961
+ "learning_rate": 1.1943697254676496e-05,
962
+ "loss": 2.3474,
963
+ "step": 78500
964
+ },
965
+ {
966
+ "epoch": 1.62,
967
+ "learning_rate": 1.1892363606496788e-05,
968
+ "loss": 2.3577,
969
+ "step": 79000
970
+ },
971
+ {
972
+ "epoch": 1.63,
973
+ "learning_rate": 1.1841029958317078e-05,
974
+ "loss": 2.3795,
975
+ "step": 79500
976
+ },
977
+ {
978
+ "epoch": 1.64,
979
+ "learning_rate": 1.178969631013737e-05,
980
+ "loss": 2.3727,
981
+ "step": 80000
982
+ },
983
+ {
984
+ "epoch": 1.65,
985
+ "learning_rate": 1.1738362661957661e-05,
986
+ "loss": 2.3259,
987
+ "step": 80500
988
+ },
989
+ {
990
+ "epoch": 1.66,
991
+ "learning_rate": 1.1687029013777953e-05,
992
+ "loss": 2.3806,
993
+ "step": 81000
994
+ },
995
+ {
996
+ "epoch": 1.67,
997
+ "learning_rate": 1.1635798032894603e-05,
998
+ "loss": 2.3705,
999
+ "step": 81500
1000
+ },
1001
+ {
1002
+ "epoch": 1.68,
1003
+ "learning_rate": 1.1584464384714893e-05,
1004
+ "loss": 2.3839,
1005
+ "step": 82000
1006
+ },
1007
+ {
1008
+ "epoch": 1.69,
1009
+ "learning_rate": 1.1533130736535185e-05,
1010
+ "loss": 2.3448,
1011
+ "step": 82500
1012
+ },
1013
+ {
1014
+ "epoch": 1.7,
1015
+ "learning_rate": 1.1481797088355475e-05,
1016
+ "loss": 2.3528,
1017
+ "step": 83000
1018
+ },
1019
+ {
1020
+ "epoch": 1.71,
1021
+ "learning_rate": 1.1430463440175767e-05,
1022
+ "loss": 2.3618,
1023
+ "step": 83500
1024
+ },
1025
+ {
1026
+ "epoch": 1.72,
1027
+ "learning_rate": 1.1379129791996057e-05,
1028
+ "loss": 2.3763,
1029
+ "step": 84000
1030
+ },
1031
+ {
1032
+ "epoch": 1.74,
1033
+ "learning_rate": 1.132789881111271e-05,
1034
+ "loss": 2.3391,
1035
+ "step": 84500
1036
+ },
1037
+ {
1038
+ "epoch": 1.75,
1039
+ "learning_rate": 1.1276565162933e-05,
1040
+ "loss": 2.3545,
1041
+ "step": 85000
1042
+ },
1043
+ {
1044
+ "epoch": 1.76,
1045
+ "learning_rate": 1.1225231514753291e-05,
1046
+ "loss": 2.3352,
1047
+ "step": 85500
1048
+ },
1049
+ {
1050
+ "epoch": 1.77,
1051
+ "learning_rate": 1.1173897866573582e-05,
1052
+ "loss": 2.3718,
1053
+ "step": 86000
1054
+ },
1055
+ {
1056
+ "epoch": 1.78,
1057
+ "learning_rate": 1.1122666885690235e-05,
1058
+ "loss": 2.3774,
1059
+ "step": 86500
1060
+ },
1061
+ {
1062
+ "epoch": 1.79,
1063
+ "learning_rate": 1.1071333237510524e-05,
1064
+ "loss": 2.3454,
1065
+ "step": 87000
1066
+ },
1067
+ {
1068
+ "epoch": 1.8,
1069
+ "learning_rate": 1.1020102256627175e-05,
1070
+ "loss": 2.3579,
1071
+ "step": 87500
1072
+ },
1073
+ {
1074
+ "epoch": 1.81,
1075
+ "learning_rate": 1.0968871275743827e-05,
1076
+ "loss": 2.3349,
1077
+ "step": 88000
1078
+ },
1079
+ {
1080
+ "epoch": 1.82,
1081
+ "learning_rate": 1.0917640294860476e-05,
1082
+ "loss": 2.3655,
1083
+ "step": 88500
1084
+ },
1085
+ {
1086
+ "epoch": 1.83,
1087
+ "learning_rate": 1.0866306646680766e-05,
1088
+ "loss": 2.3605,
1089
+ "step": 89000
1090
+ },
1091
+ {
1092
+ "epoch": 1.84,
1093
+ "learning_rate": 1.0814972998501058e-05,
1094
+ "loss": 2.3601,
1095
+ "step": 89500
1096
+ },
1097
+ {
1098
+ "epoch": 1.85,
1099
+ "learning_rate": 1.076363935032135e-05,
1100
+ "loss": 2.3622,
1101
+ "step": 90000
1102
+ },
1103
+ {
1104
+ "epoch": 1.86,
1105
+ "learning_rate": 1.071230570214164e-05,
1106
+ "loss": 2.3586,
1107
+ "step": 90500
1108
+ },
1109
+ {
1110
+ "epoch": 1.87,
1111
+ "learning_rate": 1.0660972053961932e-05,
1112
+ "loss": 2.3637,
1113
+ "step": 91000
1114
+ },
1115
+ {
1116
+ "epoch": 1.88,
1117
+ "learning_rate": 1.0609638405782223e-05,
1118
+ "loss": 2.3384,
1119
+ "step": 91500
1120
+ },
1121
+ {
1122
+ "epoch": 1.89,
1123
+ "learning_rate": 1.0558304757602514e-05,
1124
+ "loss": 2.3503,
1125
+ "step": 92000
1126
+ },
1127
+ {
1128
+ "epoch": 1.9,
1129
+ "learning_rate": 1.0506971109422806e-05,
1130
+ "loss": 2.3703,
1131
+ "step": 92500
1132
+ },
1133
+ {
1134
+ "epoch": 1.91,
1135
+ "learning_rate": 1.0455637461243096e-05,
1136
+ "loss": 2.3415,
1137
+ "step": 93000
1138
+ },
1139
+ {
1140
+ "epoch": 1.92,
1141
+ "learning_rate": 1.0404303813063388e-05,
1142
+ "loss": 2.3576,
1143
+ "step": 93500
1144
+ },
1145
+ {
1146
+ "epoch": 1.93,
1147
+ "learning_rate": 1.0352970164883678e-05,
1148
+ "loss": 2.3351,
1149
+ "step": 94000
1150
+ },
1151
+ {
1152
+ "epoch": 1.94,
1153
+ "learning_rate": 1.030173918400033e-05,
1154
+ "loss": 2.3393,
1155
+ "step": 94500
1156
+ },
1157
+ {
1158
+ "epoch": 1.95,
1159
+ "learning_rate": 1.025040553582062e-05,
1160
+ "loss": 2.3387,
1161
+ "step": 95000
1162
+ },
1163
+ {
1164
+ "epoch": 1.96,
1165
+ "learning_rate": 1.019917455493727e-05,
1166
+ "loss": 2.3464,
1167
+ "step": 95500
1168
+ },
1169
+ {
1170
+ "epoch": 1.97,
1171
+ "learning_rate": 1.0147840906757563e-05,
1172
+ "loss": 2.3427,
1173
+ "step": 96000
1174
+ },
1175
+ {
1176
+ "epoch": 1.98,
1177
+ "learning_rate": 1.0096507258577854e-05,
1178
+ "loss": 2.3678,
1179
+ "step": 96500
1180
+ },
1181
+ {
1182
+ "epoch": 1.99,
1183
+ "learning_rate": 1.0045173610398145e-05,
1184
+ "loss": 2.3497,
1185
+ "step": 97000
1186
+ },
1187
+ {
1188
+ "epoch": 2.0,
1189
+ "eval_gen_len": 13.07,
1190
+ "eval_loss": 2.340223550796509,
1191
+ "eval_rouge1": 38.6817,
1192
+ "eval_rouge2": 21.0493,
1193
+ "eval_rougeL": 37.0384,
1194
+ "eval_rougeLsum": 37.1377,
1195
+ "eval_runtime": 8.7057,
1196
+ "eval_samples_per_second": 11.487,
1197
+ "eval_steps_per_second": 0.804,
1198
+ "step": 97402
1199
+ }
1200
+ ],
1201
+ "max_steps": 194804,
1202
+ "num_train_epochs": 4,
1203
+ "total_flos": 3.4246205705319014e+18,
1204
+ "trial_name": null,
1205
+ "trial_params": null
1206
+ }
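
trainer_state.json above captures the full log_history for the two epochs trained so far: training loss falls from about 3.12 to 2.35, and the per-epoch evaluations improve from eval_loss 2.431 / ROUGE-1 36.63 at epoch 1 to eval_loss 2.340 / ROUGE-1 38.68 at epoch 2, which is why checkpoint-97402 is recorded as best_model_checkpoint. A small sketch, with an assumed local path, of pulling those evaluation rows out of the state file:

```python
import json

# Hypothetical local copy of the uploaded trainer_state.json.
with open("checkpoint-97402/trainer_state.json", encoding="utf-8") as fh:
    state = json.load(fh)

print("best_metric:", state["best_metric"])
print("best checkpoint:", state["best_model_checkpoint"])

# Evaluation entries are the log_history rows that carry eval_* keys.
for row in state["log_history"]:
    if "eval_loss" in row:
        print(f"epoch {row['epoch']}: eval_loss={row['eval_loss']:.4f}, "
              f"rouge1={row['eval_rouge1']}, rouge2={row['eval_rouge2']}, "
              f"rougeL={row['eval_rougeL']}")
```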
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:900e49aa40dd02bc4b8ec7a3e25d431ad454b22bd5a4805c5bc27d0297ecf122
+ size 3439
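
training_args.bin is the pickled TrainingArguments object that the Hugging Face Trainer saves with torch.save, so it can be inspected directly. A sketch under two assumptions: a compatible transformers version is importable (the pickle references its classes), and the local path below is where the file landed; on recent PyTorch releases weights_only=False must be passed explicitly so arbitrary objects may be unpickled:

```python
import torch

# Hypothetical local path; transformers must be importable so the pickled
# TrainingArguments class can be resolved during unpickling.
args = torch.load("checkpoint-97402/training_args.bin", weights_only=False)

print(type(args).__name__)          # e.g. Seq2SeqTrainingArguments
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```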