{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 3405,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.865102639296188e-07, |
|
"loss": 7.6534, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.9325513196480943e-06, |
|
"loss": 7.5632, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.865102639296189e-06, |
|
"loss": 7.3976, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.797653958944282e-06, |
|
"loss": 7.9288, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1730205278592377e-05, |
|
"loss": 7.9065, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4662756598240471e-05, |
|
"loss": 7.6023, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7595307917888564e-05, |
|
"loss": 7.456, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.052785923753666e-05, |
|
"loss": 7.4133, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.3460410557184755e-05, |
|
"loss": 6.8687, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.6392961876832843e-05, |
|
"loss": 6.8713, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.9325513196480942e-05, |
|
"loss": 6.2954, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.2258064516129034e-05, |
|
"loss": 6.2245, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.519061583577713e-05, |
|
"loss": 5.6886, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.8123167155425224e-05, |
|
"loss": 5.7786, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.105571847507332e-05, |
|
"loss": 5.48, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.398826979472141e-05, |
|
"loss": 5.4462, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.692082111436951e-05, |
|
"loss": 5.3462, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.98533724340176e-05, |
|
"loss": 5.0227, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.2785923753665686e-05, |
|
"loss": 5.1957, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.571847507331378e-05, |
|
"loss": 5.0037, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.8651026392961884e-05, |
|
"loss": 4.9914, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.158357771260998e-05, |
|
"loss": 4.7335, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.451612903225807e-05, |
|
"loss": 4.6138, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.744868035190616e-05, |
|
"loss": 4.9263, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.038123167155426e-05, |
|
"loss": 4.9091, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.331378299120235e-05, |
|
"loss": 4.75, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.624633431085045e-05, |
|
"loss": 4.5122, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.917888563049854e-05, |
|
"loss": 4.5551, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.211143695014664e-05, |
|
"loss": 4.5593, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.504398826979473e-05, |
|
"loss": 4.2632, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.797653958944282e-05, |
|
"loss": 4.4557, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.090909090909092e-05, |
|
"loss": 4.4678, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.384164222873902e-05, |
|
"loss": 4.3127, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.677419354838711e-05, |
|
"loss": 4.0954, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.97067448680352e-05, |
|
"loss": 3.9996, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00010263929618768328, |
|
"loss": 4.1799, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00010557184750733137, |
|
"loss": 3.8218, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00010850439882697947, |
|
"loss": 4.0961, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00011143695014662756, |
|
"loss": 4.1024, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00011436950146627568, |
|
"loss": 3.9372, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00011730205278592377, |
|
"loss": 3.9716, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012023460410557186, |
|
"loss": 3.7182, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00012316715542521996, |
|
"loss": 3.7256, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00012609970674486805, |
|
"loss": 3.9024, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00012903225806451613, |
|
"loss": 3.7147, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00013196480938416422, |
|
"loss": 3.5153, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001348973607038123, |
|
"loss": 3.5249, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00013782991202346043, |
|
"loss": 3.4098, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00014076246334310852, |
|
"loss": 3.5465, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001436950146627566, |
|
"loss": 3.4768, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001466275659824047, |
|
"loss": 3.4544, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00014956011730205278, |
|
"loss": 3.6389, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001524926686217009, |
|
"loss": 3.4381, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00015542521994134898, |
|
"loss": 3.4255, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00015835777126099707, |
|
"loss": 3.3577, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00016129032258064516, |
|
"loss": 3.5253, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00016422287390029328, |
|
"loss": 3.3494, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00016715542521994137, |
|
"loss": 2.9786, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017008797653958945, |
|
"loss": 3.2221, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017302052785923754, |
|
"loss": 2.9849, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017595307917888563, |
|
"loss": 3.3148, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017888563049853372, |
|
"loss": 3.0488, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00018181818181818183, |
|
"loss": 3.0925, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00018475073313782992, |
|
"loss": 2.873, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00018768328445747804, |
|
"loss": 3.0446, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00019061583577712613, |
|
"loss": 3.1887, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00019354838709677422, |
|
"loss": 2.9703, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001964809384164223, |
|
"loss": 3.0357, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001994134897360704, |
|
"loss": 3.2415, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00019999915896977904, |
|
"loss": 3.0616, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001999957423087518, |
|
"loss": 2.833, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00019998969754225768, |
|
"loss": 3.1048, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0001999810248291663, |
|
"loss": 2.9998, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00019996972439741538, |
|
"loss": 2.938, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00019995579654400483, |
|
"loss": 2.8948, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00019993924163498893, |
|
"loss": 2.9246, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001999200601054667, |
|
"loss": 2.829, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00019989825245957037, |
|
"loss": 3.0443, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00019987381927045233, |
|
"loss": 2.8411, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00019984676118026985, |
|
"loss": 3.0017, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001998170789001684, |
|
"loss": 2.9336, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00019978477321026282, |
|
"loss": 2.7251, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00019974984495961683, |
|
"loss": 2.9361, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00019971229506622083, |
|
"loss": 2.6362, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001996721245169676, |
|
"loss": 3.0726, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00019962933436762646, |
|
"loss": 2.6445, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00019958392574281558, |
|
"loss": 2.7365, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001995358998359724, |
|
"loss": 2.6591, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00019948525790932205, |
|
"loss": 2.7724, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00019943200129384444, |
|
"loss": 2.8148, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00019937613138923922, |
|
"loss": 2.7555, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00019931764966388895, |
|
"loss": 2.712, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00019925655765482046, |
|
"loss": 2.759, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00019919285696766453, |
|
"loss": 2.7315, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001991265492766137, |
|
"loss": 2.6124, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00019905763632437827, |
|
"loss": 2.7523, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00019898611992214034, |
|
"loss": 2.6089, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00019891200194950643, |
|
"loss": 2.7065, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.000198835284354458, |
|
"loss": 2.709, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001987559691533002, |
|
"loss": 2.8017, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00019867405843060896, |
|
"loss": 2.5728, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00019858955433917604, |
|
"loss": 2.6404, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001985024590999527, |
|
"loss": 2.5759, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001984127750019911, |
|
"loss": 2.7597, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00019832050440238432, |
|
"loss": 2.636, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00019822564972620427, |
|
"loss": 2.6216, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00019812821346643807, |
|
"loss": 2.6913, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00019802819818392237, |
|
"loss": 2.535, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00019792560650727618, |
|
"loss": 2.7263, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00019782044113283181, |
|
"loss": 2.5684, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00019771270482456385, |
|
"loss": 2.5042, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00019760240041401673, |
|
"loss": 2.5333, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00019748953080023007, |
|
"loss": 2.549, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00019737409894966267, |
|
"loss": 2.5871, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00019725610789611452, |
|
"loss": 2.7041, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00019713556074064695, |
|
"loss": 2.5114, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00019701246065150123, |
|
"loss": 2.6314, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019688681086401527, |
|
"loss": 2.3458, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019675861468053865, |
|
"loss": 2.5374, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 0.00019662787547034569, |
|
"loss": 2.7692, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019649459666954704, |
|
"loss": 2.4264, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 0.00019635878178099928, |
|
"loss": 2.4306, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019622043437421294, |
|
"loss": 2.4421, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 0.00019607955808525853, |
|
"loss": 2.5787, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.0001959361566166712, |
|
"loss": 2.576, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 0.00019579023373735322, |
|
"loss": 2.4709, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00019564179328247506, |
|
"loss": 2.4937, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00019549083915337451, |
|
"loss": 2.5605, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.00019533737531745424, |
|
"loss": 2.4941, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00019518140580807744, |
|
"loss": 2.5547, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 0.00019502293472446186, |
|
"loss": 2.46, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.000194861966231572, |
|
"loss": 2.6948, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 0.00019469850456000975, |
|
"loss": 2.5352, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001945325540059032, |
|
"loss": 2.492, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.0001943641189307936, |
|
"loss": 2.5583, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 0.00019419320376152084, |
|
"loss": 2.6595, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.00019401981299010712, |
|
"loss": 2.3485, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 0.0001938439511736388, |
|
"loss": 2.4808, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.0001936656229341466, |
|
"loss": 2.58, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 0.00019348483295848442, |
|
"loss": 2.5846, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019330158599820574, |
|
"loss": 2.5121, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 0.00019311588686943897, |
|
"loss": 2.5782, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019292774045276086, |
|
"loss": 2.4354, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019273715169306827, |
|
"loss": 2.3542, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 0.00019254412559944806, |
|
"loss": 2.4883, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00019234866724504555, |
|
"loss": 2.2913, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 0.00019215078176693112, |
|
"loss": 2.5519, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.0001919504743659653, |
|
"loss": 2.2724, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 0.00019174775030666202, |
|
"loss": 2.5216, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.0001915426149170502, |
|
"loss": 2.3688, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 0.00019133507358853373, |
|
"loss": 2.6479, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00019112513177574992, |
|
"loss": 2.2218, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00019091279499642593, |
|
"loss": 2.3275, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 0.00019069806883123387, |
|
"loss": 2.5138, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00019048095892364408, |
|
"loss": 2.4388, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 0.00019026147097977693, |
|
"loss": 2.4809, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00019003961076825263, |
|
"loss": 2.4191, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 0.00018981538412003978, |
|
"loss": 2.3277, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00018958879692830216, |
|
"loss": 2.3928, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.0001893598551482436, |
|
"loss": 2.3763, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 0.00018912856479695185, |
|
"loss": 2.3602, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00018889493195323997, |
|
"loss": 2.3453, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 0.00018865896275748704, |
|
"loss": 2.5283, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00018842066341147635, |
|
"loss": 2.4586, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 0.00018818004017823276, |
|
"loss": 2.443, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.00018793709938185789, |
|
"loss": 2.3027, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 0.0001876918474073639, |
|
"loss": 2.3587, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018744429070050578, |
|
"loss": 2.355, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018719443576761195, |
|
"loss": 2.5382, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 0.00018694228917541313, |
|
"loss": 2.2777, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.0001866878575508698, |
|
"loss": 2.4265, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 0.00018643114758099818, |
|
"loss": 2.4205, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00018617216601269423, |
|
"loss": 2.4819, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 0.00018591091965255654, |
|
"loss": 2.2673, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00018564741536670728, |
|
"loss": 2.5598, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 0.00018538166008061192, |
|
"loss": 2.1621, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.000185113660778897, |
|
"loss": 2.4109, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00018484342450516671, |
|
"loss": 2.4062, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 0.00018457095836181778, |
|
"loss": 2.3055, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00018429626950985267, |
|
"loss": 2.4536, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 0.00018401936516869148, |
|
"loss": 2.3542, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00018374025261598225, |
|
"loss": 2.2079, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 0.00018345893918740952, |
|
"loss": 2.3577, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.00018317543227650172, |
|
"loss": 2.5193, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.0001828897393344367, |
|
"loss": 2.4154, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 0.000182601867869846, |
|
"loss": 2.2691, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00018231182544861741, |
|
"loss": 2.3977, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 0.00018201961969369624, |
|
"loss": 2.3604, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00018172525828488495, |
|
"loss": 2.4388, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 0.00018142874895864112, |
|
"loss": 2.4306, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.0001811300995078744, |
|
"loss": 2.4967, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 0.00018082931778174154, |
|
"loss": 2.3996, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001805264116854401, |
|
"loss": 2.3246, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001802213891800007, |
|
"loss": 2.3288, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 0.0001799142582820778, |
|
"loss": 2.4168, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00017960502706373903, |
|
"loss": 2.4643, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 0.00017929370365225297, |
|
"loss": 2.3672, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.00017898029622987556, |
|
"loss": 2.4626, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 0.00017866481303363514, |
|
"loss": 2.1713, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.00017834726235511585, |
|
"loss": 2.3403, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 0.00017802765254023981, |
|
"loss": 2.3151, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.00017770599198904763, |
|
"loss": 2.3607, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.00017738228915547784, |
|
"loss": 2.3208, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 0.0001770565525471445, |
|
"loss": 2.4054, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.00017672879072511374, |
|
"loss": 2.2819, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 0.00017639901230367868, |
|
"loss": 2.1317, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.00017606722595013308, |
|
"loss": 2.4226, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 0.0001757334403845435, |
|
"loss": 2.339, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.00017539766437952014, |
|
"loss": 2.2972, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 0.0001750599067599863, |
|
"loss": 2.2524, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00017472017640294643, |
|
"loss": 2.2363, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00017437848223725267, |
|
"loss": 2.2668, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 0.00017403483324337056, |
|
"loss": 2.1602, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.0001736892384531426, |
|
"loss": 2.4429, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 0.00017334170694955102, |
|
"loss": 2.4132, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00017299224786647925, |
|
"loss": 2.4951, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 0.00017264087038847156, |
|
"loss": 2.2886, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.00017228758375049185, |
|
"loss": 2.446, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.00017193239723768092, |
|
"loss": 2.3134, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 0.00017157532018511237, |
|
"loss": 2.2684, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.00017121636197754735, |
|
"loss": 2.2554, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 0.0001708555320491878, |
|
"loss": 2.3473, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.00017049283988342855, |
|
"loss": 2.2431, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 0.0001701282950126081, |
|
"loss": 2.2468, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.00016976190701775806, |
|
"loss": 2.2984, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 0.00016939368552835137, |
|
"loss": 2.3279, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00016902364022204912, |
|
"loss": 2.2503, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00016865178082444638, |
|
"loss": 2.0986, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.00016827811710881635, |
|
"loss": 2.3541, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0001679026588958538, |
|
"loss": 2.2958, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 0.0001675254160534166, |
|
"loss": 2.278, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.00016714639849626668, |
|
"loss": 2.2633, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 0.00016676561618580933, |
|
"loss": 2.3342, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.00016638307912983136, |
|
"loss": 2.266, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 0.00016599879738223814, |
|
"loss": 2.2961, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.00016561278104278927, |
|
"loss": 2.3334, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.0001652250402568332, |
|
"loss": 2.2961, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 0.00016483558521504068, |
|
"loss": 2.118, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.00016444442615313668, |
|
"loss": 2.4154, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 0.0001640515733516317, |
|
"loss": 2.3285, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0001636570371355512, |
|
"loss": 2.2754, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 0.0001632608278741646, |
|
"loss": 2.2198, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.00016286295598071253, |
|
"loss": 2.1981, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.00016246343191213315, |
|
"loss": 2.1861, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 0.00016206226616878749, |
|
"loss": 2.094, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.00016165946929418322, |
|
"loss": 2.1678, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 0.00016125505187469775, |
|
"loss": 2.2353, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.0001608490245393, |
|
"loss": 2.1292, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 0.00016044139795927086, |
|
"loss": 2.2069, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.00016003218284792298, |
|
"loss": 2.2202, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 0.0001596213899603189, |
|
"loss": 2.2953, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0001592090300929887, |
|
"loss": 2.3847, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.00015879511408364605, |
|
"loss": 2.3227, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 0.0001583796528109033, |
|
"loss": 2.3445, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.00015796265719398593, |
|
"loss": 2.2701, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 0.000157544138192445, |
|
"loss": 2.2308, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00015712410680586973, |
|
"loss": 2.3682, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 0.00015670257407359792, |
|
"loss": 2.1626, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00015627955107442605, |
|
"loss": 2.1219, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 0.00015585504892631807, |
|
"loss": 2.2555, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.00015542907878611314, |
|
"loss": 2.1176, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.00015500165184923248, |
|
"loss": 2.393, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 0.00015457277934938509, |
|
"loss": 2.158, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.00015414247255827253, |
|
"loss": 2.2454, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 0.00015371074278529257, |
|
"loss": 2.3419, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00015327760137724212, |
|
"loss": 2.1038, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 0.00015284305971801883, |
|
"loss": 2.2645, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015240712922832215, |
|
"loss": 2.2823, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 0.00015196982136535288, |
|
"loss": 2.2989, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015153114762251222, |
|
"loss": 2.1625, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015109111952909968, |
|
"loss": 2.0568, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 0.00015064974865001008, |
|
"loss": 2.2737, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00015020704658542946, |
|
"loss": 2.2047, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 0.00014976302497053036, |
|
"loss": 2.4024, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014931769547516603, |
|
"loss": 2.154, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 0.00014887106980356358, |
|
"loss": 2.2493, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.0001484231596940165, |
|
"loss": 2.2429, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.00014797397691857612, |
|
"loss": 2.2164, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 0.0001475235332827421, |
|
"loss": 2.2448, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001470718406251523, |
|
"loss": 2.3309, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 0.0001466189108172717, |
|
"loss": 2.1357, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00014616475576308005, |
|
"loss": 2.3023, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00014570938739875944, |
|
"loss": 2.3552, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.0001452528176923803, |
|
"loss": 2.2292, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00014479505864358692, |
|
"loss": 2.1244, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00014433612228328214, |
|
"loss": 2.3995, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001438760206733111, |
|
"loss": 2.254, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00014341476590614424, |
|
"loss": 2.1234, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00014295237010455945, |
|
"loss": 2.2274, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.0001424888454213235, |
|
"loss": 2.1644, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00014202420403887258, |
|
"loss": 2.2781, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00014155845816899224, |
|
"loss": 2.149, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00014109162005249633, |
|
"loss": 2.2153, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.0001406237019589053, |
|
"loss": 2.0983, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00014015471618612378, |
|
"loss": 2.3075, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00013968467506011723, |
|
"loss": 2.2857, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00013921359093458825, |
|
"loss": 2.2647, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.0001387414761906516, |
|
"loss": 2.2328, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.000138268343236509, |
|
"loss": 2.1563, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013779420450712288, |
|
"loss": 2.3384, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00013731907246388977, |
|
"loss": 2.333, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.0001368429595943124, |
|
"loss": 2.1938, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013636587841167207, |
|
"loss": 2.2841, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00013588784145469914, |
|
"loss": 2.2626, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00013540886128724403, |
|
"loss": 2.1829, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.0001349289504979467, |
|
"loss": 2.2862, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.0001344481216999058, |
|
"loss": 2.2542, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00013396638753034734, |
|
"loss": 2.3202, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00013348376065029245, |
|
"loss": 2.2086, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.0001330002537442246, |
|
"loss": 2.0535, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00013251587951975625, |
|
"loss": 2.3245, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00013203065070729495, |
|
"loss": 2.272, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00013154458005970856, |
|
"loss": 2.2416, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00013105768035199034, |
|
"loss": 2.2201, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.0001305699643809229, |
|
"loss": 2.2501, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00013008144496474213, |
|
"loss": 2.3365, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00012959213494280017, |
|
"loss": 2.3172, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00012910204717522805, |
|
"loss": 2.2071, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.0001286111945425975, |
|
"loss": 2.0843, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.0001281195899455827, |
|
"loss": 2.1805, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00012762724630462112, |
|
"loss": 2.2205, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00012713417655957376, |
|
"loss": 2.3792, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00012664039366938533, |
|
"loss": 2.2761, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00012614591061174351, |
|
"loss": 2.2718, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012565074038273788, |
|
"loss": 2.3257, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00012515489599651848, |
|
"loss": 2.108, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.0001246583904849535, |
|
"loss": 2.2152, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012416123689728707, |
|
"loss": 2.3599, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00012366344829979611, |
|
"loss": 2.1716, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.000123165037775447, |
|
"loss": 2.244, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.0001226660184235517, |
|
"loss": 2.0568, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00012216640335942352, |
|
"loss": 2.1132, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00012166620571403231, |
|
"loss": 2.0462, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.0001211654386336595, |
|
"loss": 2.1835, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00012066411527955243, |
|
"loss": 2.1925, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00012016224882757862, |
|
"loss": 2.2295, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00011965985246787927, |
|
"loss": 2.0637, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.0001191569394045228, |
|
"loss": 2.2134, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011865352285515765, |
|
"loss": 2.0748, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00011814961605066504, |
|
"loss": 2.2397, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00011764523223481112, |
|
"loss": 2.2193, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00011714038466389891, |
|
"loss": 2.1098, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011663508660641999, |
|
"loss": 2.2415, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00011612935134270566, |
|
"loss": 2.1913, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011562319216457795, |
|
"loss": 2.0807, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011511662237500032, |
|
"loss": 2.111, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00011460965528772792, |
|
"loss": 2.1528, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011410230422695778, |
|
"loss": 2.1324, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00011359458252697864, |
|
"loss": 2.0786, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00011308650353182036, |
|
"loss": 2.1816, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.0001125780805949034, |
|
"loss": 2.252, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011206932707868762, |
|
"loss": 2.1158, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.00011156025635432135, |
|
"loss": 2.1911, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00011105088180128976, |
|
"loss": 2.2904, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00011054121680706328, |
|
"loss": 2.1534, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 0.00011003127476674591, |
|
"loss": 2.1485, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010952106908272284, |
|
"loss": 2.1895, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 0.00010901061316430847, |
|
"loss": 2.1565, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010849992042739394, |
|
"loss": 2.0069, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 0.00010798900429409444, |
|
"loss": 2.2347, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010747787819239656, |
|
"loss": 2.3373, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010696655555580524, |
|
"loss": 2.0499, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 0.00010645504982299082, |
|
"loss": 2.2529, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010594337443743585, |
|
"loss": 2.1572, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 0.00010543154284708163, |
|
"loss": 2.137, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010491956850397495, |
|
"loss": 2.1686, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 0.00010440746486391439, |
|
"loss": 2.1664, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010389524538609681, |
|
"loss": 2.2125, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 0.00010338292353276341, |
|
"loss": 2.1918, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010287051276884618, |
|
"loss": 2.2608, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010235802656161384, |
|
"loss": 2.1754, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 0.00010184547838031782, |
|
"loss": 2.2016, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010133288169583848, |
|
"loss": 2.0677, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 0.00010082024998033092, |
|
"loss": 2.1323, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 0.00010030759670687093, |
|
"loss": 2.1701, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.979493534910085e-05, |
|
"loss": 2.1928, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.928227938087553e-05, |
|
"loss": 2.2373, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.87696422759082e-05, |
|
"loss": 2.1827, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.825703750741626e-05, |
|
"loss": 2.1034, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.774447854776725e-05, |
|
"loss": 2.2465, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.723197886812481e-05, |
|
"loss": 2.3259, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.671955193809452e-05, |
|
"loss": 2.1901, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.620721122536998e-05, |
|
"loss": 2.0353, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.569497019537872e-05, |
|
"loss": 2.2914, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.518284231092848e-05, |
|
"loss": 2.0966, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.467084103185329e-05, |
|
"loss": 2.0779, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.41589798146597e-05, |
|
"loss": 2.1169, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.36472721121731e-05, |
|
"loss": 2.3157, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.313573137318419e-05, |
|
"loss": 2.1449, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.262437104209555e-05, |
|
"loss": 2.1209, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.21132045585682e-05, |
|
"loss": 2.1572, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.160224535716842e-05, |
|
"loss": 2.1399, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.109150686701476e-05, |
|
"loss": 2.1079, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.058100251142483e-05, |
|
"loss": 2.0942, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 9.007074570756278e-05, |
|
"loss": 1.9655, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.956074986608661e-05, |
|
"loss": 2.0805, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.905102839079559e-05, |
|
"loss": 2.2586, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.854159467827807e-05, |
|
"loss": 2.0632, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.803246211755931e-05, |
|
"loss": 2.2206, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.752364408974974e-05, |
|
"loss": 2.3172, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.701515396769311e-05, |
|
"loss": 2.0313, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.650700511561514e-05, |
|
"loss": 2.1009, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.599921088877225e-05, |
|
"loss": 2.074, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.549178463310046e-05, |
|
"loss": 2.0483, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.498473968486476e-05, |
|
"loss": 2.1001, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.447808937030856e-05, |
|
"loss": 2.2748, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.39718470053034e-05, |
|
"loss": 2.1422, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.346602589499912e-05, |
|
"loss": 2.204, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.296063933347388e-05, |
|
"loss": 2.235, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.245570060338512e-05, |
|
"loss": 2.0509, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.195122297562023e-05, |
|
"loss": 2.219, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.144721970894786e-05, |
|
"loss": 2.0662, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.094370404966942e-05, |
|
"loss": 2.0488, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.044068923127087e-05, |
|
"loss": 2.0054, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.993818847407506e-05, |
|
"loss": 2.1268, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.943621498489415e-05, |
|
"loss": 2.0403, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.893478195668259e-05, |
|
"loss": 1.9364, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.843390256819034e-05, |
|
"loss": 2.1122, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.793358998361642e-05, |
|
"loss": 2.0691, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.743385735226309e-05, |
|
"loss": 2.1533, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.693471780819019e-05, |
|
"loss": 2.0395, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.643618446986988e-05, |
|
"loss": 2.2597, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.593827043984198e-05, |
|
"loss": 2.07, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.544098880436946e-05, |
|
"loss": 2.0882, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.494435263309466e-05, |
|
"loss": 2.0713, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.44483749786957e-05, |
|
"loss": 2.1475, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.395306887654341e-05, |
|
"loss": 1.9508, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.345844734435884e-05, |
|
"loss": 2.1495, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.296452338187094e-05, |
|
"loss": 2.1613, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.247130997047511e-05, |
|
"loss": 2.164, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.197882007289189e-05, |
|
"loss": 2.003, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.148706663282629e-05, |
|
"loss": 2.169, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.09960625746277e-05, |
|
"loss": 2.2235, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.050582080294996e-05, |
|
"loss": 2.0062, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.00163542024125e-05, |
|
"loss": 2.2334, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.952767563726154e-05, |
|
"loss": 2.0814, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.903979795103199e-05, |
|
"loss": 2.0644, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.855273396620998e-05, |
|
"loss": 2.1427, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.806649648389567e-05, |
|
"loss": 2.2253, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.758109828346709e-05, |
|
"loss": 2.0299, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.709655212224398e-05, |
|
"loss": 2.0782, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.661287073515275e-05, |
|
"loss": 1.9985, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.613006683439169e-05, |
|
"loss": 2.0108, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.564815310909667e-05, |
|
"loss": 2.2069, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.516714222500794e-05, |
|
"loss": 1.9994, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.468704682413711e-05, |
|
"loss": 2.1872, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.420787952443491e-05, |
|
"loss": 2.0854, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.372965291945953e-05, |
|
"loss": 2.2533, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.325237957804563e-05, |
|
"loss": 2.0482, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.277607204397408e-05, |
|
"loss": 2.147, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.230074283564222e-05, |
|
"loss": 2.0424, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.182640444573487e-05, |
|
"loss": 2.175, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.1353069340896e-05, |
|
"loss": 2.2443, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.0880749961400964e-05, |
|
"loss": 2.0684, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.040945872082978e-05, |
|
"loss": 2.1364, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.993920800574069e-05, |
|
"loss": 2.1039, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.947001017534465e-05, |
|
"loss": 2.115, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.900187756118055e-05, |
|
"loss": 2.1173, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.853482246679104e-05, |
|
"loss": 2.1462, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.806885716739923e-05, |
|
"loss": 1.9374, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.760399390958606e-05, |
|
"loss": 2.2031, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.714024491096841e-05, |
|
"loss": 2.056, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.6677622359878036e-05, |
|
"loss": 2.1539, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.6216138415041105e-05, |
|
"loss": 2.0291, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.575580520525885e-05, |
|
"loss": 2.067, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.529663482908864e-05, |
|
"loss": 2.0761, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.483863935452605e-05, |
|
"loss": 1.9784, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.4381830818687705e-05, |
|
"loss": 2.1079, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.3926221227494845e-05, |
|
"loss": 2.1377, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.3471822555357896e-05, |
|
"loss": 2.1082, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.301864674486168e-05, |
|
"loss": 1.9832, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.2566705706451566e-05, |
|
"loss": 2.1141, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.2116011318120405e-05, |
|
"loss": 1.9963, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.1666575425096396e-05, |
|
"loss": 2.0701, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.121840983953175e-05, |
|
"loss": 2.0663, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.07715263401922e-05, |
|
"loss": 2.1082, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.032593667214752e-05, |
|
"loss": 2.0038, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.988165254646277e-05, |
|
"loss": 1.9715, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.943868563989046e-05, |
|
"loss": 2.0917, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.899704759456377e-05, |
|
"loss": 2.0824, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.855675001769052e-05, |
|
"loss": 2.1039, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.811780448124812e-05, |
|
"loss": 2.0707, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.7680222521679405e-05, |
|
"loss": 2.0922, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.724401563958938e-05, |
|
"loss": 1.9923, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.68091952994431e-05, |
|
"loss": 2.0032, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.637577292926428e-05, |
|
"loss": 2.0474, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.594375992033494e-05, |
|
"loss": 2.2291, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.551316762689602e-05, |
|
"loss": 2.1552, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.5084007365848936e-05, |
|
"loss": 2.18, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.465629041645819e-05, |
|
"loss": 2.1891, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.423002802005494e-05, |
|
"loss": 2.0704, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.3805231379741564e-05, |
|
"loss": 2.257, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.338191166009713e-05, |
|
"loss": 2.0064, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.296007998688405e-05, |
|
"loss": 2.2268, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.2539747446755565e-05, |
|
"loss": 2.0656, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.2120925086964534e-05, |
|
"loss": 2.0588, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.170362391507295e-05, |
|
"loss": 2.1166, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.1287854898662705e-05, |
|
"loss": 2.0328, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.0873628965047315e-05, |
|
"loss": 2.0747, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.046095700098466e-05, |
|
"loss": 1.9675, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.0049849852391e-05, |
|
"loss": 2.2333, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9640318324055815e-05, |
|
"loss": 2.1956, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.923237317935787e-05, |
|
"loss": 2.0067, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.882602513998234e-05, |
|
"loss": 1.997, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.84212848856389e-05, |
|
"loss": 2.0763, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.801816305378124e-05, |
|
"loss": 2.0233, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.761667023932738e-05, |
|
"loss": 2.1154, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.7216816994381175e-05, |
|
"loss": 2.0546, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6818613827955084e-05, |
|
"loss": 2.1157, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6422071205693866e-05, |
|
"loss": 2.0575, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.60271995495996e-05, |
|
"loss": 2.1667, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.563400923775774e-05, |
|
"loss": 2.1915, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.524251060406434e-05, |
|
"loss": 2.0662, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.485271393795453e-05, |
|
"loss": 2.0043, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.446462948413193e-05, |
|
"loss": 2.1659, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.407826744229957e-05, |
|
"loss": 2.2091, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.3693637966891755e-05, |
|
"loss": 2.122, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.331075116680715e-05, |
|
"loss": 2.1096, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.292961710514317e-05, |
|
"loss": 2.1768, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.255024579893138e-05, |
|
"loss": 2.0958, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.217264721887433e-05, |
|
"loss": 1.9104, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.179683128908352e-05, |
|
"loss": 2.125, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.1422807886818463e-05, |
|
"loss": 2.2961, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.105058684222723e-05, |
|
"loss": 2.1653, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0680177938087886e-05, |
|
"loss": 2.1683, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0311590909551625e-05, |
|
"loss": 1.9607, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9944835443886743e-05, |
|
"loss": 2.133, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.957992118022408e-05, |
|
"loss": 2.0095, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.9216857709303713e-05, |
|
"loss": 2.0514, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8855654573222825e-05, |
|
"loss": 1.9734, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8496321265184977e-05, |
|
"loss": 2.1773, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8138867229250588e-05, |
|
"loss": 1.9544, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.7783301860088705e-05, |
|
"loss": 2.1265, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.742963450273016e-05, |
|
"loss": 2.1215, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7077874452321783e-05, |
|
"loss": 2.1721, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6728030953882356e-05, |
|
"loss": 2.1026, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6380113202059463e-05, |
|
"loss": 2.1411, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.6034130340887895e-05, |
|
"loss": 2.038, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5690091463549326e-05, |
|
"loss": 2.0145, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5348005612133207e-05, |
|
"loss": 1.9861, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.5007881777399354e-05, |
|
"loss": 2.0184, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.4669728898541456e-05, |
|
"loss": 1.9674, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.4333555862952197e-05, |
|
"loss": 1.9125, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.3999371505989686e-05, |
|
"loss": 2.1081, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3667184610745252e-05, |
|
"loss": 2.0656, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.333700390781256e-05, |
|
"loss": 2.0374, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.300883807505819e-05, |
|
"loss": 2.0221, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2682695737393567e-05, |
|
"loss": 2.2395, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2358585466548267e-05, |
|
"loss": 2.153, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.2036515780844667e-05, |
|
"loss": 2.0518, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1716495144974213e-05, |
|
"loss": 2.0957, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.139853196977485e-05, |
|
"loss": 2.0684, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.1082634612009954e-05, |
|
"loss": 2.0296, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.07688113741488e-05, |
|
"loss": 2.0239, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0457070504148158e-05, |
|
"loss": 2.0583, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0147420195235745e-05, |
|
"loss": 2.2235, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9839868585694764e-05, |
|
"loss": 2.0935, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9534423758650043e-05, |
|
"loss": 2.0169, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.923109374185559e-05, |
|
"loss": 2.1425, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8929886507483542e-05, |
|
"loss": 2.0657, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8630809971914775e-05, |
|
"loss": 2.0442, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8333871995530726e-05, |
|
"loss": 2.0198, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.803908038250689e-05, |
|
"loss": 2.1117, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7746442880607627e-05, |
|
"loss": 2.1318, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7455967180982592e-05, |
|
"loss": 2.2086, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7167660917964556e-05, |
|
"loss": 2.101, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6881531668868822e-05, |
|
"loss": 2.1029, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6597586953793986e-05, |
|
"loss": 2.0794, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6315834235424422e-05, |
|
"loss": 2.1892, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.6036280918833924e-05, |
|
"loss": 2.0438, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5758934351291366e-05, |
|
"loss": 2.1444, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5483801822067388e-05, |
|
"loss": 2.0991, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.521089056224292e-05, |
|
"loss": 2.1623, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4940207744519118e-05, |
|
"loss": 1.9828, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.467176048302875e-05, |
|
"loss": 2.0164, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4405555833149398e-05, |
|
"loss": 1.9968, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.414160079131791e-05, |
|
"loss": 2.1906, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3879902294846536e-05, |
|
"loss": 2.0576, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.362046722174063e-05, |
|
"loss": 2.1238, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3363302390517796e-05, |
|
"loss": 2.0443, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.3108414560028836e-05, |
|
"loss": 2.1498, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2855810429279957e-05, |
|
"loss": 1.9367, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2605496637256809e-05, |
|
"loss": 2.0559, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2357479762749946e-05, |
|
"loss": 1.9912, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2111766324181938e-05, |
|
"loss": 2.0629, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.186836277943606e-05, |
|
"loss": 2.0062, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1627275525686543e-05, |
|
"loss": 2.0624, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1388510899230465e-05, |
|
"loss": 2.118, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.115207517532122e-05, |
|
"loss": 2.0604, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.0917974568003552e-05, |
|
"loss": 2.0186, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0686215229950269e-05, |
|
"loss": 2.2035, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0456803252300574e-05, |
|
"loss": 2.0591, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0229744664499897e-05, |
|
"loss": 1.9376, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0005045434141502e-05, |
|
"loss": 2.2335, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.782711466809536e-06, |
|
"loss": 2.1306, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.562748605923977e-06, |
|
"loss": 1.9716, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.345162632586945e-06, |
|
"loss": 2.0057, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.129959265430766e-06, |
|
"loss": 2.1155, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.917144160467717e-06, |
|
"loss": 2.1094, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.706722910941345e-06, |
|
"loss": 1.9811, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.498701047179491e-06, |
|
"loss": 2.1029, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.293084036448895e-06, |
|
"loss": 2.0438, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.089877282811542e-06, |
|
"loss": 1.9135, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.889086126982658e-06, |
|
"loss": 2.0472, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.690715846190233e-06, |
|
"loss": 2.0515, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.4947716540364475e-06, |
|
"loss": 2.0981, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.301258700360603e-06, |
|
"loss": 1.9314, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.110182071103766e-06, |
|
"loss": 2.1015, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.921546788175081e-06, |
|
"loss": 2.0897, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.735357809319809e-06, |
|
"loss": 1.9404, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.551620027989036e-06, |
|
"loss": 2.0471, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.370338273211052e-06, |
|
"loss": 2.1991, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.191517309464412e-06, |
|
"loss": 2.1049, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.015161836552763e-06, |
|
"loss": 2.102, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.84127648948124e-06, |
|
"loss": 2.106, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.669865838334754e-06, |
|
"loss": 2.0623, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.500934388157808e-06, |
|
"loss": 2.0537, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.334486578836118e-06, |
|
"loss": 2.0713, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.1705267849799126e-06, |
|
"loss": 2.1706, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.009059315808972e-06, |
|
"loss": 2.0049, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.850088415039378e-06, |
|
"loss": 2.1148, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.693618260771937e-06, |
|
"loss": 2.034, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.539652965382435e-06, |
|
"loss": 2.0008, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.388196575413483e-06, |
|
"loss": 2.0006, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.2392530714682745e-06, |
|
"loss": 2.2408, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.092826368105795e-06, |
|
"loss": 1.8795, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.948920313738114e-06, |
|
"loss": 2.1633, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.8075386905291554e-06, |
|
"loss": 2.1717, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.6686852142952868e-06, |
|
"loss": 2.0487, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.532363534407712e-06, |
|
"loss": 2.0741, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.398577233696454e-06, |
|
"loss": 2.0168, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.2673298283563246e-06, |
|
"loss": 2.0445, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.1386247678544365e-06, |
|
"loss": 2.1638, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.012465434839529e-06, |
|
"loss": 2.135, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.8888551450531266e-06, |
|
"loss": 1.9357, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.7677971472423013e-06, |
|
"loss": 2.028, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.6492946230743943e-06, |
|
"loss": 2.2156, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.5333506870533374e-06, |
|
"loss": 1.9134, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.4199683864377763e-06, |
|
"loss": 2.0354, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.3091507011610557e-06, |
|
"loss": 2.0408, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.200900543752793e-06, |
|
"loss": 2.2062, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.0952207592624505e-06, |
|
"loss": 1.9886, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.9921141251844633e-06, |
|
"loss": 2.1352, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.8915833513852955e-06, |
|
"loss": 2.1835, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.793631080032232e-06, |
|
"loss": 1.9694, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.6982598855238564e-06, |
|
"loss": 2.1328, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.6054722744224593e-06, |
|
"loss": 2.1902, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5152706853881704e-06, |
|
"loss": 1.9398, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4276574891148087e-06, |
|
"loss": 2.1243, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.3426349882676325e-06, |
|
"loss": 2.0591, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.260205417422755e-06, |
|
"loss": 2.0554, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1803709430084798e-06, |
|
"loss": 2.0492, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1031336632483347e-06, |
|
"loss": 1.9757, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.0284956081059172e-06, |
|
"loss": 2.1138, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.564587392315473e-07, |
|
"loss": 1.9727, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.870249499107419e-07, |
|
"loss": 2.0748, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.201960650144003e-07, |
|
"loss": 2.1319, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.559738409508855e-07, |
|
"loss": 2.2263, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.943599656198507e-07, |
|
"loss": 2.0016, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.353560583678641e-07, |
|
"loss": 2.1376, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.789636699458534e-07, |
|
"loss": 1.9401, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.251842824683716e-07, |
|
"loss": 2.0978, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.740193093746292e-07, |
|
"loss": 2.0932, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.2547009539132264e-07, |
|
"loss": 2.1511, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.7953791649734115e-07, |
|
"loss": 2.0815, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.362239798901712e-07, |
|
"loss": 2.0977, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9552942395423277e-07, |
|
"loss": 2.0939, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.5745531823090366e-07, |
|
"loss": 2.0009, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.2200266339045262e-07, |
|
"loss": 2.064, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.8917239120569418e-07, |
|
"loss": 2.0858, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.589653645275635e-07, |
|
"loss": 2.1642, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.313823772623679e-07, |
|
"loss": 2.062, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.064241543509814e-07, |
|
"loss": 2.254, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.40913517497377e-08, |
|
"loss": 2.08, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.438455641324392e-08, |
|
"loss": 2.0593, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.730428627892636e-08, |
|
"loss": 1.963, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.2850990253396885e-08, |
|
"loss": 2.0726, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.102504820069573e-08, |
|
"loss": 2.0944, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.1826770932288345e-08, |
|
"loss": 2.207, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.256400198894174e-09, |
|
"loss": 2.1563, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.3141086841250706e-09, |
|
"loss": 2.0391, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.9875, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 3405, |
|
"total_flos": 1.620805940412416e+16, |
|
"train_loss": 2.4601817576370575, |
|
"train_runtime": 30304.9667, |
|
"train_samples_per_second": 0.898, |
|
"train_steps_per_second": 0.112 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 3405, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 1.620805940412416e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |