{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.2429831595829992,
  "eval_steps": 500,
  "global_step": 3100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.99999449047447e-05, "loss": 2.7413, "step": 5 },
    { "epoch": 0.0, "learning_rate": 4.9999779619221645e-05, "loss": 2.6684, "step": 10 },
    { "epoch": 0.01, "learning_rate": 4.999950414415935e-05, "loss": 2.6511, "step": 15 },
    { "epoch": 0.01, "learning_rate": 4.999911848077199e-05, "loss": 2.72, "step": 20 },
    { "epoch": 0.01, "learning_rate": 4.999862263075944e-05, "loss": 2.7222, "step": 25 },
    { "epoch": 0.01, "learning_rate": 4.999801659630722e-05, "loss": 2.7479, "step": 30 },
    { "epoch": 0.01, "learning_rate": 4.999730038008649e-05, "loss": 2.6063, "step": 35 },
    { "epoch": 0.02, "learning_rate": 4.9996473985254055e-05, "loss": 2.576, "step": 40 },
    { "epoch": 0.02, "learning_rate": 4.999553741545237e-05, "loss": 2.579, "step": 45 },
    { "epoch": 0.02, "learning_rate": 4.999449067480945e-05, "loss": 2.5114, "step": 50 },
    { "epoch": 0.02, "learning_rate": 4.9993333767938954e-05, "loss": 2.6174, "step": 55 },
    { "epoch": 0.02, "learning_rate": 4.9992066699940085e-05, "loss": 2.6581, "step": 60 },
    { "epoch": 0.03, "learning_rate": 4.9990689476397586e-05, "loss": 2.586, "step": 65 },
    { "epoch": 0.03, "learning_rate": 4.998920210338175e-05, "loss": 2.4642, "step": 70 },
    { "epoch": 0.03, "learning_rate": 4.9987604587448343e-05, "loss": 2.6262, "step": 75 },
    { "epoch": 0.03, "learning_rate": 4.998589693563861e-05, "loss": 2.5678, "step": 80 },
    { "epoch": 0.03, "learning_rate": 4.998407915547924e-05, "loss": 2.5597, "step": 85 },
    { "epoch": 0.04, "learning_rate": 4.9982151254982304e-05, "loss": 2.6578, "step": 90 },
    { "epoch": 0.04, "learning_rate": 4.998011324264527e-05, "loss": 2.4825, "step": 95 },
    { "epoch": 0.04, "learning_rate": 4.997796512745091e-05, "loss": 2.6197, "step": 100 },
    { "epoch": 0.04, "learning_rate": 4.997570691886732e-05, "loss": 2.4921, "step": 105 },
    { "epoch": 0.04, "learning_rate": 4.99733386268478e-05, "loss": 2.5126, "step": 110 },
    { "epoch": 0.05, "learning_rate": 4.99708602618309e-05, "loss": 2.584, "step": 115 },
    { "epoch": 0.05, "learning_rate": 4.9968271834740305e-05, "loss": 2.4991, "step": 120 },
    { "epoch": 0.05, "learning_rate": 4.996557335698482e-05, "loss": 2.5242, "step": 125 },
    { "epoch": 0.05, "learning_rate": 4.996276484045832e-05, "loss": 2.4506, "step": 130 },
    { "epoch": 0.05, "learning_rate": 4.995984629753967e-05, "loss": 2.5479, "step": 135 },
    { "epoch": 0.06, "learning_rate": 4.9956817741092696e-05, "loss": 2.5316, "step": 140 },
    { "epoch": 0.06, "learning_rate": 4.995367918446613e-05, "loss": 2.6053, "step": 145 },
    { "epoch": 0.06, "learning_rate": 4.995043064149354e-05, "loss": 2.4533, "step": 150 },
    { "epoch": 0.06, "learning_rate": 4.9947072126493276e-05, "loss": 2.5279, "step": 155 },
    { "epoch": 0.06, "learning_rate": 4.994360365426839e-05, "loss": 2.4994, "step": 160 },
    { "epoch": 0.07, "learning_rate": 4.994002524010659e-05, "loss": 2.4675, "step": 165 },
    { "epoch": 0.07, "learning_rate": 4.9936336899780166e-05, "loss": 2.5233, "step": 170 },
    { "epoch": 0.07, "learning_rate": 4.993253864954592e-05, "loss": 2.4974, "step": 175 },
    { "epoch": 0.07, "learning_rate": 4.992863050614511e-05, "loss": 2.6295, "step": 180 },
    { "epoch": 0.07, "learning_rate": 4.9924612486803346e-05, "loss": 2.4744, "step": 185 },
    { "epoch": 0.08, "learning_rate": 4.992048460923052e-05, "loss": 2.5248, "step": 190 },
    { "epoch": 0.08, "learning_rate": 4.991624689162076e-05, "loss": 2.5727, "step": 195 },
    { "epoch": 0.08, "learning_rate": 4.991189935265231e-05, "loss": 2.4845, "step": 200 },
    { "epoch": 0.08, "learning_rate": 4.990744201148748e-05, "loss": 2.4544, "step": 205 },
    { "epoch": 0.08, "learning_rate": 4.990287488777253e-05, "loss": 2.4383, "step": 210 },
    { "epoch": 0.09, "learning_rate": 4.989819800163761e-05, "loss": 2.5263, "step": 215 },
    { "epoch": 0.09, "learning_rate": 4.989341137369666e-05, "loss": 2.5219, "step": 220 },
    { "epoch": 0.09, "learning_rate": 4.9888515025047316e-05, "loss": 2.4512, "step": 225 },
    { "epoch": 0.09, "learning_rate": 4.988350897727083e-05, "loss": 2.491, "step": 230 },
    { "epoch": 0.09, "learning_rate": 4.9878393252431953e-05, "loss": 2.5404, "step": 235 },
    { "epoch": 0.1, "learning_rate": 4.987316787307888e-05, "loss": 2.4777, "step": 240 },
    { "epoch": 0.1, "learning_rate": 4.9867832862243055e-05, "loss": 2.4498, "step": 245 },
    { "epoch": 0.1, "learning_rate": 4.986238824343923e-05, "loss": 2.4174, "step": 250 },
    { "epoch": 0.1, "learning_rate": 4.985683404066519e-05, "loss": 2.4589, "step": 255 },
    { "epoch": 0.1, "learning_rate": 4.985117027840175e-05, "loss": 2.5029, "step": 260 },
    { "epoch": 0.11, "learning_rate": 4.984539698161264e-05, "loss": 2.4376, "step": 265 },
    { "epoch": 0.11, "learning_rate": 4.983951417574436e-05, "loss": 2.526, "step": 270 },
    { "epoch": 0.11, "learning_rate": 4.983352188672608e-05, "loss": 2.5214, "step": 275 },
    { "epoch": 0.11, "learning_rate": 4.982742014096952e-05, "loss": 2.5188, "step": 280 },
    { "epoch": 0.11, "learning_rate": 4.982120896536888e-05, "loss": 2.4175, "step": 285 },
    { "epoch": 0.12, "learning_rate": 4.981488838730066e-05, "loss": 2.5346, "step": 290 },
    { "epoch": 0.12, "learning_rate": 4.980845843462357e-05, "loss": 2.496, "step": 295 },
    { "epoch": 0.12, "learning_rate": 4.98019191356784e-05, "loss": 2.477, "step": 300 },
    { "epoch": 0.12, "learning_rate": 4.9795270519287886e-05, "loss": 2.4205, "step": 305 },
    { "epoch": 0.12, "learning_rate": 4.9788512614756624e-05, "loss": 2.4621, "step": 310 },
    { "epoch": 0.13, "learning_rate": 4.9781645451870875e-05, "loss": 2.5098, "step": 315 },
    { "epoch": 0.13, "learning_rate": 4.9774669060898496e-05, "loss": 2.4847, "step": 320 },
    { "epoch": 0.13, "learning_rate": 4.976758347258877e-05, "loss": 2.4873, "step": 325 },
    { "epoch": 0.13, "learning_rate": 4.976038871817228e-05, "loss": 2.4462, "step": 330 },
    { "epoch": 0.13, "learning_rate": 4.9753084829360776e-05, "loss": 2.4994, "step": 335 },
    { "epoch": 0.14, "learning_rate": 4.974567183834702e-05, "loss": 2.4994, "step": 340 },
    { "epoch": 0.14, "learning_rate": 4.9738149777804665e-05, "loss": 2.4584, "step": 345 },
    { "epoch": 0.14, "learning_rate": 4.97305186808881e-05, "loss": 2.4294, "step": 350 },
    { "epoch": 0.14, "learning_rate": 4.9722778581232305e-05, "loss": 2.4499, "step": 355 },
    { "epoch": 0.14, "learning_rate": 4.9714929512952704e-05, "loss": 2.5851, "step": 360 },
    { "epoch": 0.15, "learning_rate": 4.9706971510645e-05, "loss": 2.4746, "step": 365 },
    { "epoch": 0.15, "learning_rate": 4.969890460938505e-05, "loss": 2.4184, "step": 370 },
    { "epoch": 0.15, "learning_rate": 4.9690728844728704e-05, "loss": 2.4716, "step": 375 },
    { "epoch": 0.15, "learning_rate": 4.968244425271161e-05, "loss": 2.4477, "step": 380 },
    { "epoch": 0.15, "learning_rate": 4.9674050869849124e-05, "loss": 2.4563, "step": 385 },
    { "epoch": 0.16, "learning_rate": 4.966554873313608e-05, "loss": 2.3708, "step": 390 },
    { "epoch": 0.16, "learning_rate": 4.9656937880046676e-05, "loss": 2.4351, "step": 395 },
    { "epoch": 0.16, "learning_rate": 4.9648218348534284e-05, "loss": 2.3933, "step": 400 },
    { "epoch": 0.16, "learning_rate": 4.963939017703128e-05, "loss": 2.5871, "step": 405 },
    { "epoch": 0.16, "learning_rate": 4.9630453404448905e-05, "loss": 2.3965, "step": 410 },
    { "epoch": 0.17, "learning_rate": 4.962140807017705e-05, "loss": 2.372, "step": 415 },
    { "epoch": 0.17, "learning_rate": 4.961225421408412e-05, "loss": 2.4847, "step": 420 },
    { "epoch": 0.17, "learning_rate": 4.960299187651684e-05, "loss": 2.4046, "step": 425 },
    { "epoch": 0.17, "learning_rate": 4.959362109830007e-05, "loss": 2.3912, "step": 430 },
    { "epoch": 0.17, "learning_rate": 4.9584141920736656e-05, "loss": 2.4732, "step": 435 },
    { "epoch": 0.18, "learning_rate": 4.957455438560721e-05, "loss": 2.4854, "step": 440 },
    { "epoch": 0.18, "learning_rate": 4.956485853516993e-05, "loss": 2.3736, "step": 445 },
    { "epoch": 0.18, "learning_rate": 4.9555054412160476e-05, "loss": 2.4222, "step": 450 },
    { "epoch": 0.18, "learning_rate": 4.9545142059791686e-05, "loss": 2.4498, "step": 455 },
    { "epoch": 0.18, "learning_rate": 4.9535121521753434e-05, "loss": 2.4172, "step": 460 },
    { "epoch": 0.19, "learning_rate": 4.952499284221247e-05, "loss": 2.4742, "step": 465 },
    { "epoch": 0.19, "learning_rate": 4.951475606581215e-05, "loss": 2.4008, "step": 470 },
    { "epoch": 0.19, "learning_rate": 4.950441123767231e-05, "loss": 2.4278, "step": 475 },
    { "epoch": 0.19, "learning_rate": 4.949395840338903e-05, "loss": 2.5041, "step": 480 },
    { "epoch": 0.19, "learning_rate": 4.948339760903442e-05, "loss": 2.4088, "step": 485 },
    { "epoch": 0.2, "learning_rate": 4.947272890115647e-05, "loss": 2.466, "step": 490 },
    { "epoch": 0.2, "learning_rate": 4.946195232677878e-05, "loss": 2.4404, "step": 495 },
    { "epoch": 0.2, "learning_rate": 4.9451067933400406e-05, "loss": 2.3658, "step": 500 },
    { "epoch": 0.2, "learning_rate": 4.9440075768995625e-05, "loss": 2.4247, "step": 505 },
    { "epoch": 0.2, "learning_rate": 4.942897588201372e-05, "loss": 2.5082, "step": 510 },
    { "epoch": 0.21, "learning_rate": 4.941776832137879e-05, "loss": 2.5545, "step": 515 },
    { "epoch": 0.21, "learning_rate": 4.940645313648949e-05, "loss": 2.4665, "step": 520 },
    { "epoch": 0.21, "learning_rate": 4.939503037721888e-05, "loss": 2.4195, "step": 525 },
    { "epoch": 0.21, "learning_rate": 4.938350009391413e-05, "loss": 2.4908, "step": 530 },
    { "epoch": 0.21, "learning_rate": 4.937186233739637e-05, "loss": 2.4093, "step": 535 },
    { "epoch": 0.22, "learning_rate": 4.93601171589604e-05, "loss": 2.44, "step": 540 },
    { "epoch": 0.22, "learning_rate": 4.9348264610374494e-05, "loss": 2.4989, "step": 545 },
    { "epoch": 0.22, "learning_rate": 4.9336304743880226e-05, "loss": 2.3786, "step": 550 },
    { "epoch": 0.22, "learning_rate": 4.932423761219211e-05, "loss": 2.4305, "step": 555 },
    { "epoch": 0.22, "learning_rate": 4.931206326849751e-05, "loss": 2.4012, "step": 560 },
    { "epoch": 0.23, "learning_rate": 4.92997817664563e-05, "loss": 2.4263, "step": 565 },
    { "epoch": 0.23, "learning_rate": 4.9287393160200676e-05, "loss": 2.4749, "step": 570 },
    { "epoch": 0.23, "learning_rate": 4.927489750433492e-05, "loss": 2.5083, "step": 575 },
    { "epoch": 0.23, "learning_rate": 4.926229485393513e-05, "loss": 2.5228, "step": 580 },
    { "epoch": 0.23, "learning_rate": 4.924958526454902e-05, "loss": 2.4623, "step": 585 },
    { "epoch": 0.24, "learning_rate": 4.923676879219562e-05, "loss": 2.4199, "step": 590 },
    { "epoch": 0.24, "learning_rate": 4.9223845493365085e-05, "loss": 2.2906, "step": 595 },
    { "epoch": 0.24, "learning_rate": 4.9210815425018406e-05, "loss": 2.5539, "step": 600 },
    { "epoch": 0.24, "learning_rate": 4.919767864458718e-05, "loss": 2.3456, "step": 605 },
    { "epoch": 0.24, "learning_rate": 4.9184435209973354e-05, "loss": 2.4328, "step": 610 },
    { "epoch": 0.25, "learning_rate": 4.917108517954895e-05, "loss": 2.3412, "step": 615 },
    { "epoch": 0.25, "learning_rate": 4.9157628612155836e-05, "loss": 2.604, "step": 620 },
    { "epoch": 0.25, "learning_rate": 4.9144065567105465e-05, "loss": 2.4007, "step": 625 },
    { "epoch": 0.25, "learning_rate": 4.913039610417859e-05, "loss": 2.437, "step": 630 },
    { "epoch": 0.25, "learning_rate": 4.9116620283624996e-05, "loss": 2.5004, "step": 635 },
    { "epoch": 0.26, "learning_rate": 4.91027381661633e-05, "loss": 2.5045, "step": 640 },
    { "epoch": 0.26, "learning_rate": 4.908874981298057e-05, "loss": 2.3794, "step": 645 },
    { "epoch": 0.26, "learning_rate": 4.9074655285732196e-05, "loss": 2.4621, "step": 650 },
    { "epoch": 0.26, "learning_rate": 4.906045464654148e-05, "loss": 2.549, "step": 655 },
    { "epoch": 0.26, "learning_rate": 4.9046147957999454e-05, "loss": 2.4567, "step": 660 },
    { "epoch": 0.27, "learning_rate": 4.9031735283164574e-05, "loss": 2.3243, "step": 665 },
    { "epoch": 0.27, "learning_rate": 4.901721668556244e-05, "loss": 2.4758, "step": 670 },
    { "epoch": 0.27, "learning_rate": 4.9002592229185515e-05, "loss": 2.5083, "step": 675 },
    { "epoch": 0.27, "learning_rate": 4.898786197849285e-05, "loss": 2.4428, "step": 680 },
    { "epoch": 0.27, "learning_rate": 4.897302599840981e-05, "loss": 2.4403, "step": 685 },
    { "epoch": 0.28, "learning_rate": 4.895808435432776e-05, "loss": 2.4401, "step": 690 },
    { "epoch": 0.28, "learning_rate": 4.8943037112103784e-05, "loss": 2.3789, "step": 695 },
    { "epoch": 0.28, "learning_rate": 4.892788433806042e-05, "loss": 2.3945, "step": 700 },
    { "epoch": 0.28, "learning_rate": 4.8912626098985355e-05, "loss": 2.4758, "step": 705 },
    { "epoch": 0.28, "learning_rate": 4.88972624621311e-05, "loss": 2.4646, "step": 710 },
    { "epoch": 0.29, "learning_rate": 4.888179349521475e-05, "loss": 2.4225, "step": 715 },
    { "epoch": 0.29, "learning_rate": 4.886621926641762e-05, "loss": 2.4074, "step": 720 },
    { "epoch": 0.29, "learning_rate": 4.8850539844385017e-05, "loss": 2.4438, "step": 725 },
    { "epoch": 0.29, "learning_rate": 4.883475529822587e-05, "loss": 2.3666, "step": 730 },
    { "epoch": 0.29, "learning_rate": 4.881886569751248e-05, "loss": 2.3782, "step": 735 },
    { "epoch": 0.3, "learning_rate": 4.880287111228016e-05, "loss": 2.4916, "step": 740 },
    { "epoch": 0.3, "learning_rate": 4.878677161302698e-05, "loss": 2.3639, "step": 745 },
    { "epoch": 0.3, "learning_rate": 4.877056727071342e-05, "loss": 2.4333, "step": 750 },
    { "epoch": 0.3, "learning_rate": 4.8754258156762075e-05, "loss": 2.3907, "step": 755 },
    { "epoch": 0.3, "learning_rate": 4.8737844343057315e-05, "loss": 2.433, "step": 760 },
    { "epoch": 0.31, "learning_rate": 4.8721325901945015e-05, "loss": 2.3849, "step": 765 },
    { "epoch": 0.31, "learning_rate": 4.870470290623218e-05, "loss": 2.419, "step": 770 },
    { "epoch": 0.31, "learning_rate": 4.8687975429186685e-05, "loss": 2.3694, "step": 775 },
    { "epoch": 0.31, "learning_rate": 4.867114354453688e-05, "loss": 2.4335, "step": 780 },
    { "epoch": 0.31, "learning_rate": 4.865420732647133e-05, "loss": 2.4168, "step": 785 },
    { "epoch": 0.32, "learning_rate": 4.863716684963846e-05, "loss": 2.419, "step": 790 },
    { "epoch": 0.32, "learning_rate": 4.862002218914622e-05, "loss": 2.335, "step": 795 },
    { "epoch": 0.32, "learning_rate": 4.860277342056176e-05, "loss": 2.4059, "step": 800 },
    { "epoch": 0.32, "learning_rate": 4.858542061991112e-05, "loss": 2.3423, "step": 805 },
    { "epoch": 0.32, "learning_rate": 4.856796386367884e-05, "loss": 2.4135, "step": 810 },
    { "epoch": 0.33, "learning_rate": 4.855040322880769e-05, "loss": 2.3652, "step": 815 },
    { "epoch": 0.33, "learning_rate": 4.853273879269827e-05, "loss": 2.3511, "step": 820 },
    { "epoch": 0.33, "learning_rate": 4.8514970633208726e-05, "loss": 2.413, "step": 825 },
    { "epoch": 0.33, "learning_rate": 4.849709882865435e-05, "loss": 2.4445, "step": 830 },
    { "epoch": 0.33, "learning_rate": 4.847912345780727e-05, "loss": 2.4221, "step": 835 },
    { "epoch": 0.34, "learning_rate": 4.846104459989611e-05, "loss": 2.4992, "step": 840 },
    { "epoch": 0.34, "learning_rate": 4.84428623346056e-05, "loss": 2.3923, "step": 845 },
    { "epoch": 0.34, "learning_rate": 4.842457674207627e-05, "loss": 2.3666, "step": 850 },
    { "epoch": 0.34, "learning_rate": 4.8406187902904076e-05, "loss": 2.4329, "step": 855 },
    { "epoch": 0.34, "learning_rate": 4.8387695898140026e-05, "loss": 2.3429, "step": 860 },
    { "epoch": 0.35, "learning_rate": 4.836910080928987e-05, "loss": 2.4428, "step": 865 },
    { "epoch": 0.35, "learning_rate": 4.83504027183137e-05, "loss": 2.3842, "step": 870 },
    { "epoch": 0.35, "learning_rate": 4.833160170762561e-05, "loss": 2.4064, "step": 875 },
    { "epoch": 0.35, "learning_rate": 4.8312697860093295e-05, "loss": 2.3705, "step": 880 },
    { "epoch": 0.35, "learning_rate": 4.829369125903776e-05, "loss": 2.4151, "step": 885 },
    { "epoch": 0.36, "learning_rate": 4.8274581988232894e-05, "loss": 2.4219, "step": 890 },
    { "epoch": 0.36, "learning_rate": 4.825537013190509e-05, "loss": 2.4042, "step": 895 },
    { "epoch": 0.36, "learning_rate": 4.823605577473293e-05, "loss": 2.4509, "step": 900 },
    { "epoch": 0.36, "learning_rate": 4.8216639001846764e-05, "loss": 2.4255, "step": 905 },
    { "epoch": 0.36, "learning_rate": 4.8197119898828367e-05, "loss": 2.4928, "step": 910 },
    { "epoch": 0.37, "learning_rate": 4.817749855171052e-05, "loss": 2.4044, "step": 915 },
    { "epoch": 0.37, "learning_rate": 4.8157775046976684e-05, "loss": 2.4085, "step": 920 },
    { "epoch": 0.37, "learning_rate": 4.813794947156058e-05, "loss": 2.3941, "step": 925 },
    { "epoch": 0.37, "learning_rate": 4.8118021912845815e-05, "loss": 2.3493, "step": 930 },
    { "epoch": 0.37, "learning_rate": 4.8097992458665506e-05, "loss": 2.3367, "step": 935 },
    { "epoch": 0.38, "learning_rate": 4.807786119730189e-05, "loss": 2.3865, "step": 940 },
    { "epoch": 0.38, "learning_rate": 4.8057628217485916e-05, "loss": 2.3885, "step": 945 },
    { "epoch": 0.38, "learning_rate": 4.803729360839688e-05, "loss": 2.4296, "step": 950 },
    { "epoch": 0.38, "learning_rate": 4.801685745966203e-05, "loss": 2.3856, "step": 955 },
    { "epoch": 0.38, "learning_rate": 4.799631986135615e-05, "loss": 2.3631, "step": 960 },
    { "epoch": 0.39, "learning_rate": 4.797568090400117e-05, "loss": 2.3882, "step": 965 },
    { "epoch": 0.39, "learning_rate": 4.7954940678565785e-05, "loss": 2.3396, "step": 970 },
    { "epoch": 0.39, "learning_rate": 4.793409927646504e-05, "loss": 2.3548, "step": 975 },
    { "epoch": 0.39, "learning_rate": 4.791315678955991e-05, "loss": 2.4576, "step": 980 },
    { "epoch": 0.39, "learning_rate": 4.789211331015695e-05, "loss": 2.4523, "step": 985 },
    { "epoch": 0.4, "learning_rate": 4.787096893100781e-05, "loss": 2.2542, "step": 990 },
    { "epoch": 0.4, "learning_rate": 4.784972374530889e-05, "loss": 2.4385, "step": 995 },
    { "epoch": 0.4, "learning_rate": 4.7828377846700925e-05, "loss": 2.3266, "step": 1000 },
    { "epoch": 0.4, "learning_rate": 4.780693132926851e-05, "loss": 2.3301, "step": 1005 },
    { "epoch": 0.4, "learning_rate": 4.7785384287539755e-05, "loss": 2.3554, "step": 1010 },
    { "epoch": 0.41, "learning_rate": 4.776373681648586e-05, "loss": 2.3838, "step": 1015 },
    { "epoch": 0.41, "learning_rate": 4.7741989011520645e-05, "loss": 2.3361, "step": 1020 },
    { "epoch": 0.41, "learning_rate": 4.772014096850018e-05, "loss": 2.2864, "step": 1025 },
    { "epoch": 0.41, "learning_rate": 4.769819278372235e-05, "loss": 2.3955, "step": 1030 },
    { "epoch": 0.41, "learning_rate": 4.7676144553926414e-05, "loss": 2.3802, "step": 1035 },
    { "epoch": 0.42, "learning_rate": 4.765399637629262e-05, "loss": 2.2856, "step": 1040 },
    { "epoch": 0.42, "learning_rate": 4.7631748348441705e-05, "loss": 2.3698, "step": 1045 },
    { "epoch": 0.42, "learning_rate": 4.760940056843454e-05, "loss": 2.3305, "step": 1050 },
    { "epoch": 0.42, "learning_rate": 4.758695313477166e-05, "loss": 2.4944, "step": 1055 },
    { "epoch": 0.43, "learning_rate": 4.756440614639283e-05, "loss": 2.4622, "step": 1060 },
    { "epoch": 0.43, "learning_rate": 4.754175970267661e-05, "loss": 2.3995, "step": 1065 },
    { "epoch": 0.43, "learning_rate": 4.751901390343995e-05, "loss": 2.343, "step": 1070 },
    { "epoch": 0.43, "learning_rate": 4.7496168848937674e-05, "loss": 2.4388, "step": 1075 },
    { "epoch": 0.43, "learning_rate": 4.7473224639862116e-05, "loss": 2.3694, "step": 1080 },
    { "epoch": 0.44, "learning_rate": 4.745018137734264e-05, "loss": 2.3485, "step": 1085 },
    { "epoch": 0.44, "learning_rate": 4.742703916294521e-05, "loss": 2.46, "step": 1090 },
    { "epoch": 0.44, "learning_rate": 4.740379809867193e-05, "loss": 2.3138, "step": 1095 },
    { "epoch": 0.44, "learning_rate": 4.738045828696056e-05, "loss": 2.4117, "step": 1100 },
    { "epoch": 0.44, "learning_rate": 4.735701983068416e-05, "loss": 2.3506, "step": 1105 },
    { "epoch": 0.45, "learning_rate": 4.7333482833150525e-05, "loss": 2.3711, "step": 1110 },
    { "epoch": 0.45, "learning_rate": 4.730984739810183e-05, "loss": 2.2639, "step": 1115 },
    { "epoch": 0.45, "learning_rate": 4.728611362971408e-05, "loss": 2.3749, "step": 1120 },
    { "epoch": 0.45, "learning_rate": 4.726228163259673e-05, "loss": 2.4556, "step": 1125 },
    { "epoch": 0.45, "learning_rate": 4.7238351511792165e-05, "loss": 2.4233, "step": 1130 },
    { "epoch": 0.46, "learning_rate": 4.721432337277529e-05, "loss": 2.3544, "step": 1135 },
    { "epoch": 0.46, "learning_rate": 4.7190197321453014e-05, "loss": 2.3654, "step": 1140 },
    { "epoch": 0.46, "learning_rate": 4.716597346416382e-05, "loss": 2.3356, "step": 1145 },
    { "epoch": 0.46, "learning_rate": 4.7141651907677256e-05, "loss": 2.4522, "step": 1150 },
    { "epoch": 0.46, "learning_rate": 4.7117232759193534e-05, "loss": 2.3104, "step": 1155 },
    { "epoch": 0.47, "learning_rate": 4.709271612634298e-05, "loss": 2.4492, "step": 1160 },
    { "epoch": 0.47, "learning_rate": 4.706810211718561e-05, "loss": 2.4437, "step": 1165 },
    { "epoch": 0.47, "learning_rate": 4.7043390840210636e-05, "loss": 2.3099, "step": 1170 },
    { "epoch": 0.47, "learning_rate": 4.701858240433597e-05, "loss": 2.351, "step": 1175 },
    { "epoch": 0.47, "learning_rate": 4.6993676918907804e-05, "loss": 2.4756, "step": 1180 },
    { "epoch": 0.48, "learning_rate": 4.696867449370005e-05, "loss": 2.418, "step": 1185 },
    { "epoch": 0.48, "learning_rate": 4.6943575238913904e-05, "loss": 2.5026, "step": 1190 },
    { "epoch": 0.48, "learning_rate": 4.691837926517737e-05, "loss": 2.4096, "step": 1195 },
    { "epoch": 0.48, "learning_rate": 4.689308668354473e-05, "loss": 2.4101, "step": 1200 },
    { "epoch": 0.48, "learning_rate": 4.686769760549607e-05, "loss": 2.4533, "step": 1205 },
    { "epoch": 0.49, "learning_rate": 4.684221214293683e-05, "loss": 2.2988, "step": 1210 },
    { "epoch": 0.49, "learning_rate": 4.681663040819724e-05, "loss": 2.3937, "step": 1215 },
    { "epoch": 0.49, "learning_rate": 4.679095251403189e-05, "loss": 2.3954, "step": 1220 },
    { "epoch": 0.49, "learning_rate": 4.676517857361917e-05, "loss": 2.3275, "step": 1225 },
    { "epoch": 0.49, "learning_rate": 4.673930870056085e-05, "loss": 2.4748, "step": 1230 },
    { "epoch": 0.5, "learning_rate": 4.67133430088815e-05, "loss": 2.2496, "step": 1235 },
    { "epoch": 0.5, "learning_rate": 4.668728161302803e-05, "loss": 2.3226, "step": 1240 },
    { "epoch": 0.5, "learning_rate": 4.666112462786919e-05, "loss": 2.4655, "step": 1245 },
    { "epoch": 0.5, "learning_rate": 4.663487216869502e-05, "loss": 2.3751, "step": 1250 },
    { "epoch": 0.5, "learning_rate": 4.660852435121642e-05, "loss": 2.4136, "step": 1255 },
    { "epoch": 0.51, "learning_rate": 4.658208129156456e-05, "loss": 2.3612, "step": 1260 },
    { "epoch": 0.51, "learning_rate": 4.655554310629041e-05, "loss": 2.3675, "step": 1265 },
    { "epoch": 0.51, "learning_rate": 4.652890991236421e-05, "loss": 2.2831, "step": 1270 },
    { "epoch": 0.51, "learning_rate": 4.650218182717498e-05, "loss": 2.3958, "step": 1275 },
    { "epoch": 0.51, "learning_rate": 4.647535896852997e-05, "loss": 2.3236, "step": 1280 },
    { "epoch": 0.52, "learning_rate": 4.6448441454654156e-05, "loss": 2.3904, "step": 1285 },
    { "epoch": 0.52, "learning_rate": 4.642142940418973e-05, "loss": 2.3565, "step": 1290 },
    { "epoch": 0.52, "learning_rate": 4.6394322936195556e-05, "loss": 2.5229, "step": 1295 },
    { "epoch": 0.52, "learning_rate": 4.636712217014665e-05, "loss": 2.369, "step": 1300 },
    { "epoch": 0.52, "learning_rate": 4.6339827225933665e-05, "loss": 2.3444, "step": 1305 },
    { "epoch": 0.53, "learning_rate": 4.6312438223862356e-05, "loss": 2.3387, "step": 1310 },
    { "epoch": 0.53, "learning_rate": 4.628495528465305e-05, "loss": 2.3589, "step": 1315 },
    { "epoch": 0.53, "learning_rate": 4.625737852944011e-05, "loss": 2.4779, "step": 1320 },
    { "epoch": 0.53, "learning_rate": 4.62297080797714e-05, "loss": 2.4398, "step": 1325 },
    { "epoch": 0.53, "learning_rate": 4.620194405760777e-05, "loss": 2.383, "step": 1330 },
    { "epoch": 0.54, "learning_rate": 4.6174086585322485e-05, "loss": 2.424, "step": 1335 },
    { "epoch": 0.54, "learning_rate": 4.6146135785700705e-05, "loss": 2.4289, "step": 1340 },
    { "epoch": 0.54, "learning_rate": 4.611809178193896e-05, "loss": 2.225, "step": 1345 },
    { "epoch": 0.54, "learning_rate": 4.608995469764455e-05, "loss": 2.3643, "step": 1350 },
    { "epoch": 0.54, "learning_rate": 4.606172465683508e-05, "loss": 2.4278, "step": 1355 },
    { "epoch": 0.55, "learning_rate": 4.6033401783937844e-05, "loss": 2.4464, "step": 1360 },
    { "epoch": 0.55, "learning_rate": 4.600498620378932e-05, "loss": 2.3639, "step": 1365 },
    { "epoch": 0.55, "learning_rate": 4.597647804163461e-05, "loss": 2.3595, "step": 1370 },
    { "epoch": 0.55, "learning_rate": 4.5947877423126864e-05, "loss": 2.3569, "step": 1375 },
    { "epoch": 0.55, "learning_rate": 4.591918447432675e-05, "loss": 2.4167, "step": 1380 },
    { "epoch": 0.56, "learning_rate": 4.589039932170188e-05, "loss": 2.3959, "step": 1385 },
    { "epoch": 0.56, "learning_rate": 4.5861522092126313e-05, "loss": 2.3629, "step": 1390 },
    { "epoch": 0.56, "learning_rate": 4.5832552912879894e-05, "loss": 2.3978, "step": 1395 },
    { "epoch": 0.56, "learning_rate": 4.580349191164777e-05, "loss": 2.4372, "step": 1400 },
    { "epoch": 0.56, "learning_rate": 4.5774339216519815e-05, "loss": 2.4422, "step": 1405 },
    { "epoch": 0.57, "learning_rate": 4.574509495599003e-05, "loss": 2.3786, "step": 1410 },
    { "epoch": 0.57, "learning_rate": 4.5715759258956025e-05, "loss": 2.3467, "step": 1415 },
    { "epoch": 0.57, "learning_rate": 4.568633225471841e-05, "loss": 2.371, "step": 1420 },
    { "epoch": 0.57, "learning_rate": 4.565681407298026e-05, "loss": 2.3776, "step": 1425 },
    { "epoch": 0.57, "learning_rate": 4.5627204843846504e-05, "loss": 2.3036, "step": 1430 },
    { "epoch": 0.58, "learning_rate": 4.559750469782339e-05, "loss": 2.446, "step": 1435 },
    { "epoch": 0.58, "learning_rate": 4.5567713765817886e-05, "loss": 2.3521, "step": 1440 },
    { "epoch": 0.58, "learning_rate": 4.553783217913712e-05, "loss": 2.3322, "step": 1445 },
    { "epoch": 0.58, "learning_rate": 4.550786006948777e-05, "loss": 2.3966, "step": 1450 },
    { "epoch": 0.58, "learning_rate": 4.547779756897554e-05, "loss": 2.35, "step": 1455 },
    { "epoch": 0.59, "learning_rate": 4.544764481010451e-05, "loss": 2.3739, "step": 1460 },
    { "epoch": 0.59, "learning_rate": 4.541740192577658e-05, "loss": 2.4817, "step": 1465 },
    { "epoch": 0.59, "learning_rate": 4.538706904929092e-05, "loss": 2.3857, "step": 1470 },
    { "epoch": 0.59, "learning_rate": 4.5356646314343344e-05, "loss": 2.3453, "step": 1475 },
    { "epoch": 0.59, "learning_rate": 4.532613385502571e-05, "loss": 2.2916, "step": 1480 },
    { "epoch": 0.6, "learning_rate": 4.5295531805825355e-05, "loss": 2.338, "step": 1485 },
    { "epoch": 0.6, "learning_rate": 4.526484030162449e-05, "loss": 2.2829, "step": 1490 },
    { "epoch": 0.6, "learning_rate": 4.5234059477699635e-05, "loss": 2.3761, "step": 1495 },
    { "epoch": 0.6, "learning_rate": 4.520318946972097e-05, "loss": 2.4692, "step": 1500 },
    { "epoch": 0.6, "learning_rate": 4.517223041375176e-05, "loss": 2.3689, "step": 1505 },
    { "epoch": 0.61, "learning_rate": 4.514118244624779e-05, "loss": 2.4082, "step": 1510 },
    { "epoch": 0.61, "learning_rate": 4.51100457040567e-05, "loss": 2.4152, "step": 1515 },
    { "epoch": 0.61, "learning_rate": 4.507882032441745e-05, "loss": 2.389, "step": 1520 },
    { "epoch": 0.61, "learning_rate": 4.504750644495965e-05, "loss": 2.4914, "step": 1525 },
    { "epoch": 0.61, "learning_rate": 4.501610420370298e-05, "loss": 2.4066, "step": 1530 },
    { "epoch": 0.62, "learning_rate": 4.4984613739056635e-05, "loss": 2.3664, "step": 1535 },
    { "epoch": 0.62, "learning_rate": 4.4953035189818597e-05, "loss": 2.2608, "step": 1540 },
    { "epoch": 0.62, "learning_rate": 4.4921368695175145e-05, "loss": 2.3886, "step": 1545 },
    { "epoch": 0.62, "learning_rate": 4.488961439470015e-05, "loss": 2.3459, "step": 1550 },
    { "epoch": 0.62, "learning_rate": 4.4857772428354536e-05, "loss": 2.3653, "step": 1555 },
    { "epoch": 0.63, "learning_rate": 4.4825842936485585e-05, "loss": 2.3518, "step": 1560 },
    { "epoch": 0.63, "learning_rate": 4.4793826059826394e-05, "loss": 2.5581, "step": 1565 },
    { "epoch": 0.63, "learning_rate": 4.4761721939495185e-05, "loss": 2.4307, "step": 1570 },
    { "epoch": 0.63, "learning_rate": 4.472953071699475e-05, "loss": 2.4616, "step": 1575 },
    { "epoch": 0.63, "learning_rate": 4.469725253421177e-05, "loss": 2.4261, "step": 1580 },
    { "epoch": 0.64, "learning_rate": 4.466488753341623e-05, "loss": 2.404, "step": 1585 },
    { "epoch": 0.64, "learning_rate": 4.4632435857260754e-05, "loss": 2.374, "step": 1590 },
    { "epoch": 0.64, "learning_rate": 4.459989764878004e-05, "loss": 2.3282, "step": 1595 },
    { "epoch": 0.64, "learning_rate": 4.456727305139013e-05, "loss": 2.3896, "step": 1600 },
    { "epoch": 0.64, "learning_rate": 4.453456220888788e-05, "loss": 2.5145, "step": 1605 },
    { "epoch": 0.65, "learning_rate": 4.4501765265450265e-05, "loss": 2.4032, "step": 1610 },
    { "epoch": 0.65, "learning_rate": 4.446888236563377e-05, "loss": 2.3513, "step": 1615 },
    { "epoch": 0.65, "learning_rate": 4.4435913654373726e-05, "loss": 2.2915, "step": 1620 },
    { "epoch": 0.65, "learning_rate": 4.44028592769837e-05, "loss": 2.3697, "step": 1625 },
    { "epoch": 0.65, "learning_rate": 4.4369719379154846e-05, "loss": 2.2917, "step": 1630 },
    { "epoch": 0.66, "learning_rate": 4.433649410695525e-05, "loss": 2.3396, "step": 1635 },
    { "epoch": 0.66, "learning_rate": 4.4303183606829304e-05, "loss": 2.3729, "step": 1640 },
    { "epoch": 0.66, "learning_rate": 4.426978802559704e-05, "loss": 2.3824, "step": 1645 },
    { "epoch": 0.66, "learning_rate": 4.423630751045351e-05, "loss": 2.3842, "step": 1650 },
    { "epoch": 0.66, "learning_rate": 4.4202742208968115e-05, "loss": 2.333, "step": 1655 },
    { "epoch": 0.67, "learning_rate": 4.416909226908398e-05, "loss": 2.3321, "step": 1660 },
    { "epoch": 0.67, "learning_rate": 4.4135357839117235e-05, "loss": 2.2699, "step": 1665 },
    { "epoch": 0.67, "learning_rate": 4.410153906775647e-05, "loss": 2.3899, "step": 1670 },
    { "epoch": 0.67, "learning_rate": 4.406763610406197e-05, "loss": 2.3605, "step": 1675 },
    { "epoch": 0.67, "learning_rate": 4.403364909746516e-05, "loss": 2.3266, "step": 1680 },
    { "epoch": 0.68, "learning_rate": 4.399957819776783e-05, "loss": 2.4621, "step": 1685 },
    { "epoch": 0.68, "learning_rate": 4.396542355514159e-05, "loss": 2.4612, "step": 1690 },
    { "epoch": 0.68, "learning_rate": 4.3931185320127154e-05, "loss": 2.4084, "step": 1695 },
    { "epoch": 0.68, "learning_rate": 4.389686364363365e-05, "loss": 2.3852, "step": 1700 },
    { "epoch": 0.68, "learning_rate": 4.386245867693799e-05, "loss": 2.3827, "step": 1705 },
    { "epoch": 0.69, "learning_rate": 4.3827970571684226e-05, "loss": 2.2558, "step": 1710 },
    { "epoch": 0.69, "learning_rate": 4.3793399479882834e-05, "loss": 2.3745, "step": 1715 },
    { "epoch": 0.69, "learning_rate": 4.375874555391006e-05, "loss": 2.3762, "step": 1720 },
    { "epoch": 0.69, "learning_rate": 4.3724008946507255e-05, "loss": 2.3857, "step": 1725 },
    { "epoch": 0.69, "learning_rate": 4.368918981078019e-05, "loss": 2.3628, "step": 1730 },
    { "epoch": 0.7, "learning_rate": 4.3654288300198424e-05, "loss": 2.4098, "step": 1735 },
    { "epoch": 0.7, "learning_rate": 4.361930456859455e-05, "loss": 2.4795, "step": 1740 },
    { "epoch": 0.7, "learning_rate": 4.3584238770163575e-05, "loss": 2.2823, "step": 1745 },
    { "epoch": 0.7, "learning_rate": 4.354909105946223e-05, "loss": 2.4052, "step": 1750 },
    { "epoch": 0.7, "learning_rate": 4.35138615914083e-05, "loss": 2.453, "step": 1755 },
    { "epoch": 0.71, "learning_rate": 4.347855052127988e-05, "loss": 2.3922, "step": 1760 },
    { "epoch": 0.71, "learning_rate": 4.344315800471479e-05, "loss": 2.3853, "step": 1765 },
    { "epoch": 0.71, "learning_rate": 4.340768419770979e-05, "loss": 2.4103, "step": 1770 },
    { "epoch": 0.71, "learning_rate": 4.3372129256619965e-05, "loss": 2.4206, "step": 1775 },
    { "epoch": 0.71, "learning_rate": 4.3336493338158004e-05, "loss": 2.3072, "step": 1780 },
    { "epoch": 0.72, "learning_rate": 4.330077659939349e-05, "loss": 2.4468, "step": 1785 },
    { "epoch": 0.72, "learning_rate": 4.326497919775228e-05, "loss": 2.2803, "step": 1790 },
    { "epoch": 0.72, "learning_rate": 4.32291012910157e-05, "loss": 2.3762, "step": 1795 },
    { "epoch": 0.72, "learning_rate": 4.319314303731997e-05, "loss": 2.3452, "step": 1800 },
    { "epoch": 0.72, "learning_rate": 4.3157104595155406e-05, "loss": 2.399, "step": 1805 },
    { "epoch": 0.73, "learning_rate": 4.312098612336579e-05, "loss": 2.4619, "step": 1810 },
    { "epoch": 0.73, "learning_rate": 4.308478778114764e-05, "loss": 2.3486, "step": 1815 },
    { "epoch": 0.73, "learning_rate": 4.30485097280495e-05, "loss": 2.4331, "step": 1820 },
    { "epoch": 0.73, "learning_rate": 4.301215212397127e-05, "loss": 2.3837, "step": 1825 },
    { "epoch": 0.73, "learning_rate": 4.2975715129163455e-05, "loss": 2.3124, "step": 1830 },
    { "epoch": 0.74, "learning_rate": 4.29391989042265e-05, "loss": 2.3178, "step": 1835 },
    { "epoch": 0.74, "learning_rate": 4.290260361011007e-05, "loss": 2.3721, "step": 1840 },
    { "epoch": 0.74, "learning_rate": 4.2865929408112324e-05, "loss": 2.4246, "step": 1845 },
    { "epoch": 0.74, "learning_rate": 4.2829176459879226e-05, "loss": 2.4281, "step": 1850 },
    { "epoch": 0.74, "learning_rate": 4.2792344927403825e-05, "loss": 2.3949, "step": 1855 },
    { "epoch": 0.75, "learning_rate": 4.2755434973025534e-05, "loss": 2.3241, "step": 1860 },
    { "epoch": 0.75, "learning_rate": 4.271844675942942e-05, "loss": 2.4736, "step": 1865 },
    { "epoch": 0.75, "learning_rate": 4.268138044964548e-05, "loss": 2.4091, "step": 1870 },
    { "epoch": 0.75, "learning_rate": 4.264423620704796e-05, "loss": 2.2986, "step": 1875 },
    { "epoch": 0.75, "learning_rate": 4.260701419535457e-05, "loss": 2.4634, "step": 1880 },
    { "epoch": 0.76, "learning_rate": 4.2569714578625796e-05, "loss": 2.4632, "step": 1885 },
    { "epoch": 0.76, "learning_rate": 4.253233752126421e-05, "loss": 2.3386, "step": 1890 },
    { "epoch": 0.76, "learning_rate": 4.249488318801369e-05, "loss": 2.3641, "step": 1895 },
    { "epoch": 0.76, "learning_rate": 4.2457351743958704e-05, "loss": 2.2827, "step": 1900 },
    { "epoch": 0.76, "learning_rate": 4.241974335452364e-05, "loss": 2.3265, "step": 1905 },
    { "epoch": 0.77, "learning_rate": 4.2382058185471986e-05, "loss": 2.4026, "step": 1910 },
    { "epoch": 0.77, "learning_rate": 4.234429640290566e-05, "loss": 2.2985, "step": 1915 },
    { "epoch": 0.77, "learning_rate": 4.2306458173264266e-05, "loss": 2.4331, "step": 1920 },
    { "epoch": 0.77, "learning_rate": 4.226854366332437e-05, "loss": 2.3318, "step": 1925 },
    { "epoch": 0.77, "learning_rate": 4.223055304019872e-05, "loss": 2.4336, "step": 1930 },
    { "epoch": 0.78, "learning_rate": 4.2192486471335585e-05, "loss": 2.3694, "step": 1935 },
    { "epoch": 0.78, "learning_rate": 4.2154344124517934e-05, "loss": 2.3591, "step": 1940 },
    { "epoch": 0.78, "learning_rate": 4.211612616786277e-05, "loss": 2.3168, "step": 1945 },
    { "epoch": 0.78, "learning_rate": 4.2077832769820324e-05, "loss": 2.3961, "step": 1950 },
    { "epoch": 0.78, "learning_rate": 4.203946409917337e-05, "loss": 2.401, "step": 1955 },
    { "epoch": 0.79, "learning_rate": 4.200102032503645e-05, "loss": 2.3745, "step": 1960 },
    { "epoch": 0.79, "learning_rate": 4.19625016168551e-05, "loss": 2.4112, "step": 1965 },
    { "epoch": 0.79, "learning_rate": 4.192390814440519e-05, "loss": 2.3601, "step": 1970 },
    { "epoch": 0.79, "learning_rate": 4.188524007779209e-05, "loss": 2.5197, "step": 1975 },
    { "epoch": 0.79, "learning_rate": 4.1846497587449955e-05, "loss": 2.3009, "step": 1980 },
    { "epoch": 0.8, "learning_rate": 4.1807680844140995e-05, "loss": 2.2949, "step": 1985 },
    { "epoch": 0.8, "learning_rate": 4.176879001895466e-05, "loss": 2.3835, "step": 1990 },
    { "epoch": 0.8, "learning_rate": 4.172982528330696e-05, "loss": 2.3994, "step": 1995 },
    { "epoch": 0.8, "learning_rate": 4.169078680893964e-05, "loss": 2.3407, "step": 2000 },
    { "epoch": 0.8, "learning_rate": 4.165167476791949e-05, "loss": 2.3427, "step": 2005 },
    { "epoch": 0.81, "learning_rate": 4.161248933263755e-05, "loss": 2.3075, "step": 2010 },
    { "epoch": 0.81, "learning_rate": 4.157323067580834e-05, "loss": 2.3023, "step": 2015 },
    { "epoch": 0.81, "learning_rate": 4.15338989704691e-05, "loss": 2.3777, "step": 2020 },
    { "epoch": 0.81, "learning_rate": 4.149449438997908e-05, "loss": 2.3727, "step": 2025 },
    { "epoch": 0.81, "learning_rate": 4.14550171080187e-05, "loss": 2.3647, "step": 2030 },
    { "epoch": 0.82, "learning_rate": 4.1415467298588844e-05, "loss": 2.4701, "step": 2035 },
    { "epoch": 0.82, "learning_rate": 4.137584513601005e-05, "loss": 2.2996, "step": 2040 },
    { "epoch": 0.82, "learning_rate": 4.133615079492178e-05, "loss": 2.2806, "step": 2045 },
    { "epoch": 0.82, "learning_rate": 4.129638445028162e-05, "loss": 2.3191, "step": 2050 },
    { "epoch": 0.82, "learning_rate": 4.1256546277364516e-05, "loss": 2.3612, "step": 2055 },
    { "epoch": 0.83, "learning_rate": 4.121663645176202e-05, "loss": 2.3404, "step": 2060 },
    { "epoch": 0.83, "learning_rate": 4.1176655149381496e-05, "loss": 2.3706, "step": 2065 },
    { "epoch": 0.83, "learning_rate": 4.1136602546445336e-05, "loss": 2.3546, "step": 2070 },
    { "epoch": 0.83, "learning_rate": 4.109647881949022e-05, "loss": 2.3337, "step": 2075 },
    { "epoch": 0.83, "learning_rate": 4.1056284145366315e-05, "loss": 2.4883, "step": 2080 },
    { "epoch": 0.84, "learning_rate": 4.101601870123648e-05, "loss": 2.3847, "step": 2085 },
    { "epoch": 0.84, "learning_rate": 4.0975682664575497e-05, "loss": 2.4195, "step": 2090 },
    { "epoch": 0.84, "learning_rate": 4.093527621316932e-05, "loss": 2.3695, "step": 2095 },
    { "epoch": 0.84, "learning_rate": 4.0894799525114244e-05, "loss": 2.3126, "step": 2100 },
    { "epoch": 0.84, "learning_rate": 4.0854252778816154e-05, "loss": 2.2785, "step": 2105 },
    { "epoch": 0.85, "learning_rate": 4.081363615298971e-05, "loss": 2.4195, "step": 2110 },
    { "epoch": 0.85, "learning_rate": 4.0772949826657574e-05, "loss": 2.2698, "step": 2115 },
    { "epoch": 0.85, "learning_rate": 4.073219397914964e-05, "loss": 2.42, "step": 2120 },
    { "epoch": 0.85, "learning_rate": 4.069136879010221e-05, "loss": 2.3127, "step": 2125 },
    { "epoch": 0.85, "learning_rate": 4.065047443945722e-05, "loss": 2.4099, "step": 2130 },
    { "epoch": 0.86, "learning_rate": 4.060951110746145e-05, "loss": 2.2552, "step": 2135 },
    { "epoch": 0.86, "learning_rate": 4.0568478974665724e-05, "loss": 2.3218, "step": 2140 },
    { "epoch": 0.86, "learning_rate": 4.05273782219241e-05, "loss": 2.3357, "step": 2145 },
    { "epoch": 0.86, "learning_rate": 4.048620903039308e-05, "loss": 2.381, "step": 2150 },
    { "epoch": 0.86, "learning_rate": 4.044497158153085e-05, "loss": 2.4034, "step": 2155 },
    { "epoch": 0.87, "learning_rate": 4.040366605709644e-05, "loss": 2.307, "step": 2160 },
    { "epoch": 0.87, "learning_rate": 4.036229263914891e-05, "loss": 2.3937, "step": 2165 },
    { "epoch": 0.87, "learning_rate": 4.032085151004659e-05, "loss": 2.3931, "step": 2170 },
    { "epoch": 0.87, "learning_rate": 4.0279342852446234e-05, "loss": 2.3632, "step": 2175 },
    { "epoch": 0.87, "learning_rate": 4.023776684930226e-05, "loss": 2.4364, "step": 2180 },
    { "epoch": 0.88, "learning_rate": 4.019612368386592e-05, "loss": 2.2749, "step": 2185 },
    { "epoch": 0.88, "learning_rate": 4.015441353968445e-05, "loss": 2.2606, "step": 2190 },
    { "epoch": 0.88, "learning_rate": 4.011263660060035e-05, "loss": 2.3281, "step": 2195 },
    { "epoch": 0.88, "learning_rate": 4.0070793050750525e-05, "loss": 2.3648, "step": 2200 },
    { "epoch": 0.88, "learning_rate": 4.0028883074565434e-05, "loss": 2.2916, "step": 2205 },
    { "epoch": 0.89, "learning_rate": 3.9986906856768356e-05, "loss": 2.3686, "step": 2210 },
    { "epoch": 0.89, "learning_rate": 3.994486458237453e-05, "loss": 2.3707, "step": 2215 },
    { "epoch": 0.89, "learning_rate": 3.990275643669032e-05, "loss": 2.2938, "step": 2220 },
    { "epoch": 0.89, "learning_rate": 3.986058260531249e-05, "loss": 2.3256, "step": 2225 },
    { "epoch": 0.89, "learning_rate": 3.9818343274127254e-05, "loss": 2.3937, "step": 2230 },
    { "epoch": 0.9, "learning_rate": 3.977603862930955e-05, "loss": 2.3821, "step": 2235 },
    { "epoch": 0.9, "learning_rate": 3.973366885732219e-05, "loss": 2.3767, "step": 2240 },
    { "epoch": 0.9, "learning_rate": 3.969123414491507e-05, "loss": 2.2879, "step": 2245 },
    { "epoch": 0.9, "learning_rate": 3.9648734679124276e-05, "loss": 2.3657, "step": 2250 },
    { "epoch": 0.9, "learning_rate": 3.9606170647271324e-05, "loss": 2.293, "step": 2255 },
    { "epoch": 0.91, "learning_rate": 3.9563542236962315e-05, "loss": 2.3071, "step": 2260 },
    { "epoch": 0.91, "learning_rate": 3.95208496360871e-05, "loss": 2.3659, "step": 2265 },
    { "epoch": 0.91, "learning_rate": 3.9478093032818446e-05, "loss": 2.3608, "step": 2270 },
    { "epoch": 0.91, "learning_rate": 3.943527261561125e-05, "loss": 2.3578, "step": 2275 },
    { "epoch": 0.91, "learning_rate": 3.939238857320164e-05, "loss": 2.3909, "step": 2280 },
    { "epoch": 0.92, "learning_rate": 3.934944109460622e-05, "loss": 2.3294, "step": 2285 },
    { "epoch": 0.92, "learning_rate": 3.930643036912115e-05, "loss": 2.3123, "step": 2290 },
    { "epoch": 0.92, "learning_rate": 3.926335658632139e-05, "loss": 2.3545, "step": 2295 },
    { "epoch": 0.92, "learning_rate": 3.922021993605983e-05, "loss": 2.4079, "step": 2300 },
    { "epoch": 0.92, "learning_rate": 3.917702060846645e-05, "loss": 2.3649, "step": 2305 },
    { "epoch": 0.93, "learning_rate": 3.913375879394749e-05, "loss": 2.3067, "step": 2310 },
    { "epoch": 0.93, "learning_rate": 3.9090434683184595e-05, "loss": 2.3587, "step": 2315 },
    { "epoch": 0.93, "learning_rate": 3.9047048467134014e-05, "loss": 2.3159, "step": 2320 },
    { "epoch": 0.93, "learning_rate": 3.9003600337025716e-05, "loss": 2.2579, "step": 2325 },
    { "epoch": 0.93, "learning_rate": 3.8960090484362556e-05, "loss": 2.3721, "step": 2330 },
    { "epoch": 0.94, "learning_rate": 3.891651910091946e-05, "loss": 2.3236, "step": 2335 },
    { "epoch": 0.94, "learning_rate": 3.887288637874254e-05, "loss": 2.3895, "step": 2340 },
    { "epoch": 0.94, "learning_rate": 3.8829192510148295e-05, "loss": 2.3905, "step": 2345 },
    { "epoch": 0.94, "learning_rate": 3.878543768772268e-05, "loss": 2.3239, "step": 2350 },
    { "epoch": 0.94, "learning_rate": 3.874162210432037e-05, "loss": 2.3448, "step": 2355 },
    { "epoch": 0.95, "learning_rate": 3.869774595306381e-05, "loss": 2.2926, "step": 2360 },
    { "epoch": 0.95, "learning_rate": 3.8653809427342424e-05, "loss": 2.4272, "step": 2365 },
    { "epoch": 0.95, "learning_rate": 3.860981272081175e-05, "loss": 2.2908, "step": 2370 },
    { "epoch": 0.95, "learning_rate": 3.856575602739255e-05, "loss": 2.3301, "step": 2375 },
    { "epoch": 0.95, "learning_rate": 3.8521639541270026e-05, "loss": 2.2928, "step": 2380 },
    { "epoch": 0.96, "learning_rate": 3.84774634568929e-05, "loss": 2.3393, "step": 2385 },
    { "epoch": 0.96, "learning_rate": 3.843322796897257e-05, "loss": 2.347, "step": 2390 },
    { "epoch": 0.96, "learning_rate": 3.838893327248229e-05, "loss": 2.2124, "step": 2395 },
    { "epoch": 0.96, "learning_rate": 3.8344579562656267e-05, "loss": 2.4755, "step": 2400 },
    { "epoch": 0.96, "learning_rate": 3.830016703498881e-05, "loss": 2.4223, "step": 2405 },
    { "epoch": 0.97, "learning_rate": 3.825569588523349e-05, "loss": 2.3186, "step": 2410 },
    { "epoch": 0.97, "learning_rate": 3.8211166309402255e-05, "loss": 2.3285, "step": 2415 },
    { "epoch": 0.97, "learning_rate": 3.816657850376456e-05, "loss": 2.278, "step": 2420 },
    { "epoch": 0.97, "learning_rate": 3.812193266484655e-05, "loss": 2.3484, "step": 2425 },
    { "epoch": 0.97, "learning_rate": 3.807722898943013e-05, "loss": 2.3701, "step": 2430 },
    { "epoch": 0.98, "learning_rate": 3.803246767455212e-05, "loss": 2.3725, "step": 2435 },
    { "epoch": 0.98, "learning_rate": 3.798764891750341e-05, "loss": 2.3794, "step": 2440 },
    { "epoch": 0.98, "learning_rate": 3.794277291582808e-05, "loss": 2.3723, "step": 2445 },
    { "epoch": 0.98, "learning_rate": 3.789783986732249e-05, "loss": 2.2647, "step": 2450 },
    { "epoch": 0.98, "learning_rate": 3.785284997003449e-05, "loss": 2.4012, "step": 2455 },
    { "epoch": 0.99, "learning_rate": 3.7807803422262454e-05, "loss": 2.3204, "step": 2460 },
    { "epoch": 0.99, "learning_rate": 3.776270042255448e-05, "loss": 2.4077, "step": 2465 },
    { "epoch": 0.99, "learning_rate": 3.771754116970745e-05, "loss": 2.2711, "step": 2470 },
    { "epoch": 0.99, "learning_rate": 3.767232586276621e-05, "loss": 2.2796, "step": 2475 },
    { "epoch": 0.99, "learning_rate": 3.7627054701022694e-05, "loss": 2.4134, "step": 2480 },
    { "epoch": 1.0, "learning_rate": 3.7581727884014975e-05, "loss": 2.3704, "step": 2485 },
    { "epoch": 1.0, "learning_rate": 3.7536345611526466e-05, "loss": 2.3535, "step": 2490 },
    { "epoch": 1.0, "learning_rate": 3.7490908083585e-05, "loss": 2.2263, "step": 2495 },
    { "epoch": 1.0, "learning_rate": 3.744541550046195e-05, "loss": 2.4121, "step": 2500 },
    { "epoch": 1.0, "learning_rate": 3.7399868062671365e-05, "loss": 2.4216, "step": 2505 },
    { "epoch": 1.01, "learning_rate": 3.735426597096904e-05, "loss": 2.3946, "step": 2510 },
    { "epoch": 1.01, "learning_rate": 3.7308609426351705e-05, "loss": 2.3976, "step": 2515 },
    { "epoch": 1.01, "learning_rate": 3.726289863005607e-05, "loss": 2.2879, "step": 2520 },
    { "epoch": 1.01, "learning_rate": 3.721713378355798e-05, "loss": 2.333, "step": 2525 },
    { "epoch": 1.01, "learning_rate": 3.717131508857151e-05, "loss": 2.3792, "step": 2530 },
    { "epoch": 1.02, "learning_rate": 3.712544274704806e-05, "loss": 2.4156, "step": 2535 },
    { "epoch": 1.02, "learning_rate": 3.707951696117551e-05, "loss": 2.3832, "step": 2540 },
    { "epoch": 1.02, "learning_rate": 3.70335379333773e-05, "loss": 2.3718, "step": 2545 },
    { "epoch": 1.02, "learning_rate": 3.698750586631152e-05, "loss": 2.3723, "step": 2550 },
    { "epoch": 1.02, "learning_rate": 3.694142096287005e-05, "loss": 2.3251, "step": 2555 },
    { "epoch": 1.03, "learning_rate": 3.689528342617765e-05, "loss": 2.3951, "step": 2560 },
    { "epoch": 1.03, "learning_rate": 3.684909345959107e-05, "loss": 2.3366, "step": 2565 },
    { "epoch": 1.03, "learning_rate": 3.6802851266698155e-05, "loss": 2.324, "step": 2570 },
    { "epoch": 1.03, "learning_rate": 3.6756557051316944e-05, "loss": 2.4009, "step": 2575 },
    { "epoch": 1.03, "learning_rate": 3.671021101749476e-05, "loss": 2.3438, "step": 2580 },
    { "epoch": 1.04, "learning_rate": 3.666381336950732e-05, "loss": 2.3467, "step": 2585 },
    { "epoch": 1.04, "learning_rate": 3.661736431185785e-05, "loss": 2.2955, "step": 2590 },
    { "epoch": 1.04, "learning_rate": 3.657086404927617e-05, "loss": 2.4046, "step": 2595 },
    { "epoch": 1.04, "learning_rate": 3.652431278671778e-05, "loss": 2.4039, "step": 2600 },
    { "epoch": 1.04, "learning_rate": 3.6477710729362993e-05, "loss": 2.3361, "step": 2605 },
    { "epoch": 1.05, "learning_rate": 3.6431058082615964e-05, "loss": 2.3366, "step": 2610 },
    { "epoch": 1.05, "learning_rate": 3.638435505210386e-05, "loss": 2.3166, "step": 2615 },
    { "epoch": 1.05, "learning_rate": 3.633760184367593e-05, "loss": 2.3143, "step": 2620 },
    { "epoch": 1.05, "learning_rate": 3.629079866340255e-05, "loss": 2.3554, "step": 2625 },
    { "epoch": 1.05, "learning_rate": 3.624394571757438e-05, "loss": 2.3985, "step": 2630 },
    { "epoch": 1.06, "learning_rate": 3.6197043212701415e-05, "loss": 2.2855, "step": 2635 },
    { "epoch": 1.06, "learning_rate": 3.6150091355512095e-05, "loss": 2.2603, "step": 2640 },
    { "epoch": 1.06, "learning_rate": 3.61030903529524e-05, "loss": 2.2897, "step": 2645 },
    { "epoch": 1.06, "learning_rate": 3.6056040412184885e-05, "loss": 2.2849, "step": 2650 },
    { "epoch": 1.06, "learning_rate": 3.600894174058784e-05, "loss": 2.3384, "step": 2655 },
    { "epoch": 1.07, "learning_rate": 3.596179454575435e-05, "loss": 2.2554, "step": 2660 },
    { "epoch": 1.07, "learning_rate": 3.5914599035491325e-05, "loss": 2.4858, "step": 2665 },
    { "epoch": 1.07, "learning_rate": 3.586735541781867e-05, "loss": 2.2883, "step": 2670 },
    {
|
"epoch": 1.07, |
|
"learning_rate": 3.582006390096833e-05, |
|
"loss": 2.3069, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.5772724693383355e-05, |
|
"loss": 2.2335, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.572533800371699e-05, |
|
"loss": 2.2282, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.567790404083179e-05, |
|
"loss": 2.3839, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.563042301379866e-05, |
|
"loss": 2.3956, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.5582895131895926e-05, |
|
"loss": 2.3072, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.553532060460848e-05, |
|
"loss": 2.3122, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.548769964162676e-05, |
|
"loss": 2.3593, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.54400324528459e-05, |
|
"loss": 2.3543, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.539231924836477e-05, |
|
"loss": 2.3294, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.534456023848507e-05, |
|
"loss": 2.3029, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.529675563371039e-05, |
|
"loss": 2.3401, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.524890564474528e-05, |
|
"loss": 2.385, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.5201010482494325e-05, |
|
"loss": 2.3602, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.515307035806123e-05, |
|
"loss": 2.2311, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.5105085482747844e-05, |
|
"loss": 2.2947, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.505705606805331e-05, |
|
"loss": 2.4305, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.500898232567304e-05, |
|
"loss": 2.3198, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.4960864467497854e-05, |
|
"loss": 2.3137, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.4912702705613005e-05, |
|
"loss": 2.2951, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.486449725229725e-05, |
|
"loss": 2.3566, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.481624832002193e-05, |
|
"loss": 2.339, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.476795612145003e-05, |
|
"loss": 2.415, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.471962086943524e-05, |
|
"loss": 2.3375, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.467124277702099e-05, |
|
"loss": 2.3554, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.4622822057439544e-05, |
|
"loss": 2.3027, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.457435892411105e-05, |
|
"loss": 2.3329, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.452585359064264e-05, |
|
"loss": 2.3727, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.447730627082738e-05, |
|
"loss": 2.2927, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.442871717864344e-05, |
|
"loss": 2.4039, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.438008652825309e-05, |
|
"loss": 2.3189, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.433141453400178e-05, |
|
"loss": 2.3741, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.428270141041718e-05, |
|
"loss": 2.4139, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.4233947372208266e-05, |
|
"loss": 2.4032, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.418515263426431e-05, |
|
"loss": 2.3638, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.4136317411654015e-05, |
|
"loss": 2.2921, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.408744191962449e-05, |
|
"loss": 2.3825, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.403852637360037e-05, |
|
"loss": 2.2529, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.398957098918279e-05, |
|
"loss": 2.3247, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.394057598214852e-05, |
|
"loss": 2.3056, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.389154156844895e-05, |
|
"loss": 2.3573, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.384246796420917e-05, |
|
"loss": 2.3496, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.379335538572699e-05, |
|
"loss": 2.3329, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.374420404947201e-05, |
|
"loss": 2.3825, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.369501417208467e-05, |
|
"loss": 2.2246, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.3645785970375284e-05, |
|
"loss": 2.3993, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.359651966132308e-05, |
|
"loss": 2.3517, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.354721546207524e-05, |
|
"loss": 2.3539, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.3497873589945955e-05, |
|
"loss": 2.3726, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.344849426241548e-05, |
|
"loss": 2.3264, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.339907769712914e-05, |
|
"loss": 2.2947, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.33496241118964e-05, |
|
"loss": 2.2463, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.3300133724689894e-05, |
|
"loss": 2.4127, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.325060675364447e-05, |
|
"loss": 2.2724, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.32010434170562e-05, |
|
"loss": 2.3135, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.315144393338147e-05, |
|
"loss": 2.3629, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.310180852123598e-05, |
|
"loss": 2.3314, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.305213739939377e-05, |
|
"loss": 2.3761, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.300243078678631e-05, |
|
"loss": 2.2618, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.295268890250146e-05, |
|
"loss": 2.3752, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.29029119657826e-05, |
|
"loss": 2.3157, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.285310019602754e-05, |
|
"loss": 2.2982, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.280325381278767e-05, |
|
"loss": 2.4425, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.275337303576692e-05, |
|
"loss": 2.4271, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.270345808482082e-05, |
|
"loss": 2.3309, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.2653509179955524e-05, |
|
"loss": 2.4269, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.260352654132686e-05, |
|
"loss": 2.3242, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.2553510389239313e-05, |
|
"loss": 2.3107, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.2503460944145114e-05, |
|
"loss": 2.3688, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.24533784266432e-05, |
|
"loss": 2.324, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.240326305747831e-05, |
|
"loss": 2.3301, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.235311505753996e-05, |
|
"loss": 2.3624, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.23029346478615e-05, |
|
"loss": 2.3298, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.2252722049619147e-05, |
|
"loss": 2.352, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.220247748413094e-05, |
|
"loss": 2.3591, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.215220117285589e-05, |
|
"loss": 2.2382, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.210189333739285e-05, |
|
"loss": 2.3135, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.2051554199479706e-05, |
|
"loss": 2.2861, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.200118398099225e-05, |
|
"loss": 2.2897, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.1950782903943285e-05, |
|
"loss": 2.4072, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.190035119048164e-05, |
|
"loss": 2.3306, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.184988906289114e-05, |
|
"loss": 2.3509, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.179939674358971e-05, |
|
"loss": 2.4062, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1748874455128325e-05, |
|
"loss": 2.3785, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1698322420190055e-05, |
|
"loss": 2.3933, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1647740861589074e-05, |
|
"loss": 2.2905, |
|
"step": 3100 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 7482, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 4.129785211969536e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|