{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9987096774193547,
  "eval_steps": 500,
  "global_step": 3486,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008602150537634409,
      "grad_norm": 1.435989601453093,
      "learning_rate": 5.730659025787966e-06,
      "loss": 1.2534,
      "step": 10
    },
    {
      "epoch": 0.017204301075268817,
      "grad_norm": 0.956311142058486,
      "learning_rate": 1.1461318051575932e-05,
      "loss": 1.1941,
      "step": 20
    },
    {
      "epoch": 0.025806451612903226,
      "grad_norm": 0.5955272986463238,
      "learning_rate": 1.7191977077363898e-05,
      "loss": 1.0416,
      "step": 30
    },
    {
      "epoch": 0.034408602150537634,
      "grad_norm": 0.5371220864077184,
      "learning_rate": 2.2922636103151864e-05,
      "loss": 0.9252,
      "step": 40
    },
    {
      "epoch": 0.043010752688172046,
      "grad_norm": 0.3298242735866968,
      "learning_rate": 2.8653295128939826e-05,
      "loss": 0.8582,
      "step": 50
    },
    {
      "epoch": 0.05161290322580645,
      "grad_norm": 0.3017426075394883,
      "learning_rate": 3.4383954154727795e-05,
      "loss": 0.7998,
      "step": 60
    },
    {
      "epoch": 0.060215053763440864,
      "grad_norm": 0.3375526814194791,
      "learning_rate": 4.011461318051576e-05,
      "loss": 0.7638,
      "step": 70
    },
    {
      "epoch": 0.06881720430107527,
      "grad_norm": 0.2705319023822148,
      "learning_rate": 4.584527220630373e-05,
      "loss": 0.731,
      "step": 80
    },
    {
      "epoch": 0.07741935483870968,
      "grad_norm": 0.2857694097360681,
      "learning_rate": 5.157593123209169e-05,
      "loss": 0.7083,
      "step": 90
    },
    {
      "epoch": 0.08602150537634409,
      "grad_norm": 0.3098635398347818,
      "learning_rate": 5.730659025787965e-05,
      "loss": 0.6841,
      "step": 100
    },
    {
      "epoch": 0.09462365591397849,
      "grad_norm": 0.3198686339975185,
      "learning_rate": 6.303724928366761e-05,
      "loss": 0.6606,
      "step": 110
    },
    {
      "epoch": 0.1032258064516129,
      "grad_norm": 0.3190855908911574,
      "learning_rate": 6.876790830945559e-05,
      "loss": 0.6557,
      "step": 120
    },
    {
      "epoch": 0.11182795698924732,
      "grad_norm": 0.3381628025718819,
      "learning_rate": 7.449856733524355e-05,
      "loss": 0.649,
      "step": 130
    },
    {
      "epoch": 0.12043010752688173,
      "grad_norm": 0.31598591910637613,
      "learning_rate": 8.022922636103152e-05,
      "loss": 0.6497,
      "step": 140
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 0.3128921107543011,
      "learning_rate": 8.595988538681948e-05,
      "loss": 0.6392,
      "step": 150
    },
    {
      "epoch": 0.13763440860215054,
      "grad_norm": 0.32601075629670023,
      "learning_rate": 9.169054441260745e-05,
      "loss": 0.6329,
      "step": 160
    },
    {
      "epoch": 0.14623655913978495,
      "grad_norm": 0.3810140632755748,
      "learning_rate": 9.742120343839543e-05,
      "loss": 0.6246,
      "step": 170
    },
    {
      "epoch": 0.15483870967741936,
      "grad_norm": 0.32108686980228696,
      "learning_rate": 0.00010315186246418338,
      "loss": 0.6341,
      "step": 180
    },
    {
      "epoch": 0.16344086021505377,
      "grad_norm": 0.35140482524130334,
      "learning_rate": 0.00010888252148997136,
      "loss": 0.6341,
      "step": 190
    },
    {
      "epoch": 0.17204301075268819,
      "grad_norm": 0.29928696988494263,
      "learning_rate": 0.0001146131805157593,
      "loss": 0.6256,
      "step": 200
    },
    {
      "epoch": 0.18064516129032257,
      "grad_norm": 0.29203351329015653,
      "learning_rate": 0.0001203438395415473,
      "loss": 0.6206,
      "step": 210
    },
    {
      "epoch": 0.18924731182795698,
      "grad_norm": 0.269908780529402,
      "learning_rate": 0.00012607449856733523,
      "loss": 0.6201,
      "step": 220
    },
    {
      "epoch": 0.1978494623655914,
      "grad_norm": 0.2918951896599236,
      "learning_rate": 0.0001318051575931232,
      "loss": 0.6181,
      "step": 230
    },
    {
      "epoch": 0.2064516129032258,
      "grad_norm": 0.2708453980927917,
      "learning_rate": 0.00013753581661891118,
      "loss": 0.6053,
      "step": 240
    },
    {
      "epoch": 0.21505376344086022,
      "grad_norm": 0.2662594416135698,
      "learning_rate": 0.00014326647564469916,
      "loss": 0.6033,
      "step": 250
    },
    {
      "epoch": 0.22365591397849463,
      "grad_norm": 0.2776129880103733,
      "learning_rate": 0.0001489971346704871,
      "loss": 0.618,
      "step": 260
    },
    {
      "epoch": 0.23225806451612904,
      "grad_norm": 0.279068656400945,
      "learning_rate": 0.00015472779369627508,
      "loss": 0.6111,
      "step": 270
    },
    {
      "epoch": 0.24086021505376345,
      "grad_norm": 0.2524990953581738,
      "learning_rate": 0.00016045845272206303,
      "loss": 0.5888,
      "step": 280
    },
    {
      "epoch": 0.24946236559139784,
      "grad_norm": 0.27628464942088166,
      "learning_rate": 0.000166189111747851,
      "loss": 0.6051,
      "step": 290
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 0.2738432329342113,
      "learning_rate": 0.00017191977077363896,
      "loss": 0.6025,
      "step": 300
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.25607005690614404,
      "learning_rate": 0.00017765042979942693,
      "loss": 0.6035,
      "step": 310
    },
    {
      "epoch": 0.2752688172043011,
      "grad_norm": 0.247801611216806,
      "learning_rate": 0.0001833810888252149,
      "loss": 0.6118,
      "step": 320
    },
    {
      "epoch": 0.2838709677419355,
      "grad_norm": 0.24971595808191485,
      "learning_rate": 0.00018911174785100288,
      "loss": 0.6003,
      "step": 330
    },
    {
      "epoch": 0.2924731182795699,
      "grad_norm": 0.2411785853482113,
      "learning_rate": 0.00019484240687679086,
      "loss": 0.6019,
      "step": 340
    },
    {
      "epoch": 0.3010752688172043,
      "grad_norm": 0.24693597842223552,
      "learning_rate": 0.0001999999498534943,
      "loss": 0.5912,
      "step": 350
    },
    {
      "epoch": 0.3096774193548387,
      "grad_norm": 0.23649676383007667,
      "learning_rate": 0.0001999939323336644,
      "loss": 0.6032,
      "step": 360
    },
    {
      "epoch": 0.31827956989247314,
      "grad_norm": 0.23340962816367694,
      "learning_rate": 0.0001999778862042167,
      "loss": 0.5867,
      "step": 370
    },
    {
      "epoch": 0.32688172043010755,
      "grad_norm": 0.228292747503904,
      "learning_rate": 0.0001999518130744525,
      "loss": 0.5869,
      "step": 380
    },
    {
      "epoch": 0.33548387096774196,
      "grad_norm": 0.23546242335669995,
      "learning_rate": 0.0001999157155593029,
      "loss": 0.585,
      "step": 390
    },
    {
      "epoch": 0.34408602150537637,
      "grad_norm": 0.23135215802858206,
      "learning_rate": 0.0001998695972790664,
      "loss": 0.5929,
      "step": 400
    },
    {
      "epoch": 0.35268817204301073,
      "grad_norm": 0.22524299635343834,
      "learning_rate": 0.00019981346285904595,
      "loss": 0.5976,
      "step": 410
    },
    {
      "epoch": 0.36129032258064514,
      "grad_norm": 0.21756192058074578,
      "learning_rate": 0.00019974731792908506,
      "loss": 0.587,
      "step": 420
    },
    {
      "epoch": 0.36989247311827955,
      "grad_norm": 0.23763473038875538,
      "learning_rate": 0.000199671169123003,
      "loss": 0.5934,
      "step": 430
    },
    {
      "epoch": 0.37849462365591396,
      "grad_norm": 0.23701431523985933,
      "learning_rate": 0.00019958502407792963,
      "loss": 0.6009,
      "step": 440
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 0.22134697304917678,
      "learning_rate": 0.00019948889143353948,
      "loss": 0.5994,
      "step": 450
    },
    {
      "epoch": 0.3956989247311828,
      "grad_norm": 0.2268467235014266,
      "learning_rate": 0.00019938278083118517,
      "loss": 0.5982,
      "step": 460
    },
    {
      "epoch": 0.4043010752688172,
      "grad_norm": 0.2247581060586749,
      "learning_rate": 0.00019926670291293055,
      "loss": 0.5782,
      "step": 470
    },
    {
      "epoch": 0.4129032258064516,
      "grad_norm": 0.21243655986167898,
      "learning_rate": 0.00019914066932048317,
      "loss": 0.5874,
      "step": 480
    },
    {
      "epoch": 0.421505376344086,
      "grad_norm": 0.21385338375703783,
      "learning_rate": 0.00019900469269402708,
      "loss": 0.5813,
      "step": 490
    },
    {
      "epoch": 0.43010752688172044,
      "grad_norm": 0.21415772228830113,
      "learning_rate": 0.00019885878667095472,
      "loss": 0.5934,
      "step": 500
    },
    {
      "epoch": 0.43870967741935485,
      "grad_norm": 0.23153816801674154,
      "learning_rate": 0.00019870296588449948,
      "loss": 0.5766,
      "step": 510
    },
    {
      "epoch": 0.44731182795698926,
      "grad_norm": 0.21768913976009108,
      "learning_rate": 0.00019853724596226795,
      "loss": 0.5964,
      "step": 520
    },
    {
      "epoch": 0.4559139784946237,
      "grad_norm": 0.21481929780656356,
      "learning_rate": 0.00019836164352467263,
      "loss": 0.5748,
      "step": 530
    },
    {
      "epoch": 0.4645161290322581,
      "grad_norm": 0.29328457757364856,
      "learning_rate": 0.00019817617618326516,
      "loss": 0.5863,
      "step": 540
    },
    {
      "epoch": 0.4731182795698925,
      "grad_norm": 0.2140715456768234,
      "learning_rate": 0.00019798086253896974,
      "loss": 0.5787,
      "step": 550
    },
    {
      "epoch": 0.4817204301075269,
      "grad_norm": 0.22386132897333755,
      "learning_rate": 0.00019777572218021789,
      "loss": 0.5863,
      "step": 560
    },
    {
      "epoch": 0.49032258064516127,
      "grad_norm": 0.23289220288123677,
      "learning_rate": 0.00019756077568098372,
      "loss": 0.5638,
      "step": 570
    },
    {
      "epoch": 0.4989247311827957,
      "grad_norm": 0.21115266625450407,
      "learning_rate": 0.00019733604459872055,
      "loss": 0.5744,
      "step": 580
    },
    {
      "epoch": 0.5075268817204301,
      "grad_norm": 0.2207246383657776,
      "learning_rate": 0.00019710155147219886,
      "loss": 0.5789,
      "step": 590
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 0.2127412426650313,
      "learning_rate": 0.00019685731981924592,
      "loss": 0.5764,
      "step": 600
    },
    {
      "epoch": 0.524731182795699,
      "grad_norm": 0.2256297779595538,
      "learning_rate": 0.00019660337413438697,
      "loss": 0.5798,
      "step": 610
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.22243638154383857,
      "learning_rate": 0.00019633973988638877,
      "loss": 0.5767,
      "step": 620
    },
    {
      "epoch": 0.5419354838709678,
      "grad_norm": 0.21425852582870433,
      "learning_rate": 0.00019606644351570518,
      "loss": 0.5736,
      "step": 630
    },
    {
      "epoch": 0.5505376344086022,
      "grad_norm": 0.20506928092936336,
      "learning_rate": 0.00019578351243182545,
      "loss": 0.5766,
      "step": 640
    },
    {
      "epoch": 0.5591397849462365,
      "grad_norm": 0.2175808639953357,
      "learning_rate": 0.0001954909750105252,
      "loss": 0.5883,
      "step": 650
    },
    {
      "epoch": 0.567741935483871,
      "grad_norm": 0.21972174494018643,
      "learning_rate": 0.00019518886059102062,
      "loss": 0.5774,
      "step": 660
    },
    {
      "epoch": 0.5763440860215053,
      "grad_norm": 0.2222552681428519,
      "learning_rate": 0.0001948771994730259,
      "loss": 0.5694,
      "step": 670
    },
    {
      "epoch": 0.5849462365591398,
      "grad_norm": 0.21552094411185982,
      "learning_rate": 0.0001945560229137145,
      "loss": 0.5657,
      "step": 680
    },
    {
      "epoch": 0.5935483870967742,
      "grad_norm": 0.21243053577901072,
      "learning_rate": 0.0001942253631245842,
      "loss": 0.5776,
      "step": 690
    },
    {
      "epoch": 0.6021505376344086,
      "grad_norm": 0.21257424919590467,
      "learning_rate": 0.00019388525326822665,
      "loss": 0.5764,
      "step": 700
    },
    {
      "epoch": 0.610752688172043,
      "grad_norm": 0.21485742957719725,
      "learning_rate": 0.0001935357274550013,
      "loss": 0.5821,
      "step": 710
    },
    {
      "epoch": 0.6193548387096774,
      "grad_norm": 0.2184569847068338,
      "learning_rate": 0.00019317682073961444,
      "loss": 0.5674,
      "step": 720
    },
    {
      "epoch": 0.6279569892473118,
      "grad_norm": 0.20912150886711225,
      "learning_rate": 0.00019280856911760354,
      "loss": 0.559,
      "step": 730
    },
    {
      "epoch": 0.6365591397849463,
      "grad_norm": 0.21433879312995557,
      "learning_rate": 0.00019243100952172723,
      "loss": 0.5615,
      "step": 740
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 0.22214855400737676,
      "learning_rate": 0.00019204417981826091,
      "loss": 0.5761,
      "step": 750
    },
    {
      "epoch": 0.6537634408602151,
      "grad_norm": 0.2196194509755129,
      "learning_rate": 0.00019164811880319958,
      "loss": 0.5718,
      "step": 760
    },
    {
      "epoch": 0.6623655913978495,
      "grad_norm": 0.2089872234950888,
      "learning_rate": 0.00019124286619836637,
      "loss": 0.5819,
      "step": 770
    },
    {
      "epoch": 0.6709677419354839,
      "grad_norm": 0.20958335214398927,
      "learning_rate": 0.00019082846264742917,
      "loss": 0.5663,
      "step": 780
    },
    {
      "epoch": 0.6795698924731183,
      "grad_norm": 0.22250020784958888,
      "learning_rate": 0.00019040494971182413,
      "loss": 0.572,
      "step": 790
    },
    {
      "epoch": 0.6881720430107527,
      "grad_norm": 0.2162793985278103,
      "learning_rate": 0.00018997236986658753,
      "loss": 0.5578,
      "step": 800
    },
    {
      "epoch": 0.6967741935483871,
      "grad_norm": 0.21650281403806723,
      "learning_rate": 0.00018953076649609564,
      "loss": 0.5698,
      "step": 810
    },
    {
      "epoch": 0.7053763440860215,
      "grad_norm": 0.21882852535131322,
      "learning_rate": 0.000189080183889714,
      "loss": 0.5712,
      "step": 820
    },
    {
      "epoch": 0.7139784946236559,
      "grad_norm": 0.21577728486568806,
      "learning_rate": 0.00018862066723735512,
      "loss": 0.5676,
      "step": 830
    },
    {
      "epoch": 0.7225806451612903,
      "grad_norm": 0.20731869739137426,
      "learning_rate": 0.00018815226262494655,
      "loss": 0.567,
      "step": 840
    },
    {
      "epoch": 0.7311827956989247,
      "grad_norm": 0.21966746890763253,
      "learning_rate": 0.00018767501702980874,
      "loss": 0.5738,
      "step": 850
    },
    {
      "epoch": 0.7397849462365591,
      "grad_norm": 0.2199440815107056,
      "learning_rate": 0.00018718897831594355,
      "loss": 0.5691,
      "step": 860
    },
    {
      "epoch": 0.7483870967741936,
      "grad_norm": 0.21722437739899045,
      "learning_rate": 0.00018669419522923393,
      "loss": 0.5753,
      "step": 870
    },
    {
      "epoch": 0.7569892473118279,
      "grad_norm": 0.2218916153172574,
      "learning_rate": 0.00018619071739255506,
      "loss": 0.5572,
      "step": 880
    },
    {
      "epoch": 0.7655913978494624,
      "grad_norm": 0.20612913226427296,
      "learning_rate": 0.00018567859530079753,
      "loss": 0.567,
      "step": 890
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.2296891108499081,
      "learning_rate": 0.00018515788031580317,
      "loss": 0.5735,
      "step": 900
    },
    {
      "epoch": 0.7827956989247312,
      "grad_norm": 0.20791733581224098,
      "learning_rate": 0.0001846286246612138,
      "loss": 0.56,
      "step": 910
    },
    {
      "epoch": 0.7913978494623656,
      "grad_norm": 0.2130914822501822,
      "learning_rate": 0.00018409088141723364,
      "loss": 0.5588,
      "step": 920
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.21931892060132505,
      "learning_rate": 0.00018354470451530574,
      "loss": 0.5718,
      "step": 930
    },
    {
      "epoch": 0.8086021505376344,
      "grad_norm": 0.20510223609898656,
      "learning_rate": 0.00018299014873270314,
      "loss": 0.5693,
      "step": 940
    },
    {
      "epoch": 0.8172043010752689,
      "grad_norm": 0.20754250893604087,
      "learning_rate": 0.00018242726968703505,
      "loss": 0.5657,
      "step": 950
    },
    {
      "epoch": 0.8258064516129032,
      "grad_norm": 0.2119047697743998,
      "learning_rate": 0.00018185612383066893,
      "loss": 0.5523,
      "step": 960
    },
    {
      "epoch": 0.8344086021505376,
      "grad_norm": 0.21665086623022414,
      "learning_rate": 0.00018127676844506874,
      "loss": 0.5564,
      "step": 970
    },
    {
      "epoch": 0.843010752688172,
      "grad_norm": 0.22240006490076028,
      "learning_rate": 0.00018068926163505,
      "loss": 0.5661,
      "step": 980
    },
    {
      "epoch": 0.8516129032258064,
      "grad_norm": 0.20224509449303205,
      "learning_rate": 0.00018009366232295235,
      "loss": 0.5709,
      "step": 990
    },
    {
      "epoch": 0.8602150537634409,
      "grad_norm": 0.20943438346218987,
      "learning_rate": 0.00017949003024273015,
      "loss": 0.5567,
      "step": 1000
    },
    {
      "epoch": 0.8688172043010752,
      "grad_norm": 0.2119577392060311,
      "learning_rate": 0.0001788784259339616,
      "loss": 0.5726,
      "step": 1010
    },
    {
      "epoch": 0.8774193548387097,
      "grad_norm": 0.21671277914581005,
      "learning_rate": 0.0001782589107357771,
      "loss": 0.5721,
      "step": 1020
    },
    {
      "epoch": 0.886021505376344,
      "grad_norm": 0.21042958546072746,
      "learning_rate": 0.00017763154678070733,
      "loss": 0.5682,
      "step": 1030
    },
    {
      "epoch": 0.8946236559139785,
      "grad_norm": 0.21134103487495276,
      "learning_rate": 0.0001769963969884521,
      "loss": 0.5578,
      "step": 1040
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 0.2138330925142033,
      "learning_rate": 0.0001763535250595696,
      "loss": 0.5622,
      "step": 1050
    },
    {
      "epoch": 0.9118279569892473,
      "grad_norm": 0.20191022832768318,
      "learning_rate": 0.00017570299546908812,
      "loss": 0.5543,
      "step": 1060
    },
    {
      "epoch": 0.9204301075268817,
      "grad_norm": 0.20993539174391937,
      "learning_rate": 0.0001750448734600394,
      "loss": 0.5687,
      "step": 1070
    },
    {
      "epoch": 0.9290322580645162,
      "grad_norm": 0.21448241595075324,
      "learning_rate": 0.0001743792250369155,
      "loss": 0.5628,
      "step": 1080
    },
    {
      "epoch": 0.9376344086021505,
      "grad_norm": 0.21562115673441815,
      "learning_rate": 0.00017370611695904895,
      "loss": 0.5707,
      "step": 1090
    },
    {
      "epoch": 0.946236559139785,
      "grad_norm": 0.20800257947078069,
      "learning_rate": 0.00017302561673391732,
      "loss": 0.5682,
      "step": 1100
    },
    {
      "epoch": 0.9548387096774194,
      "grad_norm": 0.2142950321860787,
      "learning_rate": 0.00017233779261037268,
      "loss": 0.5644,
      "step": 1110
    },
    {
      "epoch": 0.9634408602150538,
      "grad_norm": 0.22988301024268806,
      "learning_rate": 0.00017164271357179698,
      "loss": 0.5747,
      "step": 1120
    },
    {
      "epoch": 0.9720430107526882,
      "grad_norm": 0.2044546345524181,
      "learning_rate": 0.00017094044932918336,
      "loss": 0.5627,
      "step": 1130
    },
    {
      "epoch": 0.9806451612903225,
      "grad_norm": 0.20801733651601362,
      "learning_rate": 0.00017023107031414477,
      "loss": 0.5655,
      "step": 1140
    },
    {
      "epoch": 0.989247311827957,
      "grad_norm": 0.21348629669321226,
      "learning_rate": 0.00016951464767185013,
      "loss": 0.5569,
      "step": 1150
    },
    {
      "epoch": 0.9978494623655914,
      "grad_norm": 0.21098078403488563,
      "learning_rate": 0.0001687912532538892,
      "loss": 0.5686,
      "step": 1160
    },
    {
      "epoch": 1.0064516129032257,
      "grad_norm": 0.23232328451409845,
      "learning_rate": 0.00016806095961106632,
      "loss": 0.5352,
      "step": 1170
    },
    {
      "epoch": 1.0150537634408603,
      "grad_norm": 0.21849412389861325,
      "learning_rate": 0.00016732383998612407,
      "loss": 0.5166,
      "step": 1180
    },
    {
      "epoch": 1.0236559139784946,
      "grad_norm": 0.22255257432573036,
      "learning_rate": 0.00016657996830639774,
      "loss": 0.5286,
      "step": 1190
    },
    {
      "epoch": 1.032258064516129,
      "grad_norm": 0.23162311614487038,
      "learning_rate": 0.0001658294191764009,
      "loss": 0.5209,
      "step": 1200
    },
    {
      "epoch": 1.0408602150537634,
      "grad_norm": 0.23679830183057837,
      "learning_rate": 0.0001650722678703432,
      "loss": 0.5155,
      "step": 1210
    },
    {
      "epoch": 1.049462365591398,
      "grad_norm": 0.24431579126710756,
      "learning_rate": 0.00016430859032458086,
      "loss": 0.5239,
      "step": 1220
    },
    {
      "epoch": 1.0580645161290323,
      "grad_norm": 0.21925666538005387,
      "learning_rate": 0.00016353846313000098,
      "loss": 0.5142,
      "step": 1230
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 0.23676074055821078,
      "learning_rate": 0.00016276196352434,
      "loss": 0.5267,
      "step": 1240
    },
    {
      "epoch": 1.075268817204301,
      "grad_norm": 0.23532815378000083,
      "learning_rate": 0.00016197916938443733,
      "loss": 0.5211,
      "step": 1250
    },
    {
      "epoch": 1.0838709677419356,
      "grad_norm": 0.23216395268938977,
      "learning_rate": 0.00016119015921842503,
      "loss": 0.5161,
      "step": 1260
    },
    {
      "epoch": 1.09247311827957,
      "grad_norm": 0.23822646088799643,
      "learning_rate": 0.0001603950121578539,
      "loss": 0.5077,
      "step": 1270
    },
    {
      "epoch": 1.1010752688172043,
      "grad_norm": 0.2290007557059369,
      "learning_rate": 0.00015959380794975734,
      "loss": 0.5156,
      "step": 1280
    },
    {
      "epoch": 1.1096774193548387,
      "grad_norm": 0.22484506558131512,
      "learning_rate": 0.00015878662694865327,
      "loss": 0.5205,
      "step": 1290
    },
    {
      "epoch": 1.118279569892473,
      "grad_norm": 0.22971689352325197,
      "learning_rate": 0.00015797355010848519,
      "loss": 0.5218,
      "step": 1300
    },
    {
      "epoch": 1.1268817204301076,
      "grad_norm": 0.23491219834804952,
      "learning_rate": 0.00015715465897450317,
      "loss": 0.5248,
      "step": 1310
    },
    {
      "epoch": 1.135483870967742,
      "grad_norm": 0.23555948179310204,
      "learning_rate": 0.00015633003567508557,
      "loss": 0.524,
      "step": 1320
    },
    {
      "epoch": 1.1440860215053763,
      "grad_norm": 0.2275140567256555,
      "learning_rate": 0.00015549976291350204,
      "loss": 0.517,
      "step": 1330
    },
    {
      "epoch": 1.1526881720430107,
      "grad_norm": 0.22712302146976637,
      "learning_rate": 0.00015466392395961908,
      "loss": 0.5243,
      "step": 1340
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 0.22097671231695554,
      "learning_rate": 0.0001538226026415489,
      "loss": 0.5118,
      "step": 1350
    },
    {
      "epoch": 1.1698924731182796,
      "grad_norm": 0.22706370204760404,
      "learning_rate": 0.00015297588333724183,
      "loss": 0.5235,
      "step": 1360
    },
    {
      "epoch": 1.178494623655914,
      "grad_norm": 0.2309770159886724,
      "learning_rate": 0.00015212385096602415,
      "loss": 0.5202,
      "step": 1370
    },
    {
      "epoch": 1.1870967741935483,
      "grad_norm": 0.23885726185135214,
      "learning_rate": 0.0001512665909800811,
      "loss": 0.5201,
      "step": 1380
    },
    {
      "epoch": 1.1956989247311829,
      "grad_norm": 0.23447118491147034,
      "learning_rate": 0.00015040418935588682,
      "loss": 0.5103,
      "step": 1390
    },
    {
      "epoch": 1.2043010752688172,
      "grad_norm": 0.24148993682260503,
      "learning_rate": 0.0001495367325855816,
      "loss": 0.5229,
      "step": 1400
    },
    {
      "epoch": 1.2129032258064516,
      "grad_norm": 0.22483070355440393,
      "learning_rate": 0.00014866430766829743,
      "loss": 0.5132,
      "step": 1410
    },
    {
      "epoch": 1.221505376344086,
      "grad_norm": 0.2254845362019036,
      "learning_rate": 0.00014778700210143242,
      "loss": 0.5231,
      "step": 1420
    },
    {
      "epoch": 1.2301075268817203,
      "grad_norm": 0.23952864821588327,
      "learning_rate": 0.00014690490387187584,
      "loss": 0.5157,
      "step": 1430
    },
    {
      "epoch": 1.238709677419355,
      "grad_norm": 0.2257464446574845,
      "learning_rate": 0.00014601810144718345,
      "loss": 0.537,
      "step": 1440
    },
    {
      "epoch": 1.2473118279569892,
      "grad_norm": 0.23617654028249638,
      "learning_rate": 0.0001451266837667051,
      "loss": 0.5187,
      "step": 1450
    },
    {
      "epoch": 1.2559139784946236,
      "grad_norm": 0.23682708876074962,
      "learning_rate": 0.00014423074023266463,
      "loss": 0.52,
      "step": 1460
    },
    {
      "epoch": 1.2645161290322582,
      "grad_norm": 0.22896552654447108,
      "learning_rate": 0.00014333036070119363,
      "loss": 0.5163,
      "step": 1470
    },
    {
      "epoch": 1.2731182795698925,
      "grad_norm": 0.24027250250612717,
      "learning_rate": 0.00014242563547331956,
      "loss": 0.517,
      "step": 1480
    },
    {
      "epoch": 1.281720430107527,
      "grad_norm": 0.23395780934976781,
      "learning_rate": 0.00014151665528590925,
      "loss": 0.5215,
      "step": 1490
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 0.24728067879450083,
      "learning_rate": 0.0001406035113025687,
      "loss": 0.5208,
      "step": 1500
    },
    {
      "epoch": 1.2989247311827956,
      "grad_norm": 0.22394688366493132,
      "learning_rate": 0.0001396862951045001,
      "loss": 0.5274,
      "step": 1510
    },
    {
      "epoch": 1.3075268817204302,
      "grad_norm": 0.23212309124084482,
      "learning_rate": 0.00013876509868131692,
      "loss": 0.5172,
      "step": 1520
    },
    {
      "epoch": 1.3161290322580645,
      "grad_norm": 0.23245525514987117,
      "learning_rate": 0.0001378400144218181,
      "loss": 0.521,
      "step": 1530
    },
    {
      "epoch": 1.324731182795699,
      "grad_norm": 0.24340497246468923,
      "learning_rate": 0.00013691113510472212,
      "loss": 0.533,
      "step": 1540
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.2321928544760521,
      "learning_rate": 0.00013597855388936217,
      "loss": 0.5283,
      "step": 1550
    },
    {
      "epoch": 1.3419354838709676,
      "grad_norm": 0.23326523784187517,
      "learning_rate": 0.00013504236430634286,
      "loss": 0.5416,
      "step": 1560
    },
    {
      "epoch": 1.3505376344086022,
      "grad_norm": 0.2262845978211386,
      "learning_rate": 0.0001341026602481597,
      "loss": 0.5231,
      "step": 1570
    },
    {
      "epoch": 1.3591397849462366,
      "grad_norm": 0.2371091538942524,
      "learning_rate": 0.00013315953595978287,
      "loss": 0.5328,
      "step": 1580
    },
    {
      "epoch": 1.367741935483871,
      "grad_norm": 0.2404491874213034,
      "learning_rate": 0.00013221308602920468,
      "loss": 0.5273,
      "step": 1590
    },
    {
      "epoch": 1.3763440860215055,
      "grad_norm": 0.2382492165329928,
      "learning_rate": 0.00013126340537795343,
      "loss": 0.529,
      "step": 1600
    },
    {
      "epoch": 1.3849462365591398,
      "grad_norm": 0.22921228109841923,
      "learning_rate": 0.0001303105892515734,
      "loss": 0.5198,
      "step": 1610
    },
    {
      "epoch": 1.3935483870967742,
      "grad_norm": 0.22823020260902263,
      "learning_rate": 0.00012935473321007255,
      "loss": 0.5215,
      "step": 1620
    },
    {
      "epoch": 1.4021505376344086,
      "grad_norm": 0.23341543795313835,
      "learning_rate": 0.0001283959331183386,
      "loss": 0.5179,
      "step": 1630
    },
    {
      "epoch": 1.410752688172043,
      "grad_norm": 0.23483174318348113,
      "learning_rate": 0.00012743428513652442,
      "loss": 0.5242,
      "step": 1640
    },
    {
      "epoch": 1.4193548387096775,
      "grad_norm": 0.23015296236307278,
      "learning_rate": 0.00012646988571040398,
      "loss": 0.522,
      "step": 1650
    },
    {
      "epoch": 1.4279569892473118,
      "grad_norm": 0.24213814226406005,
      "learning_rate": 0.00012550283156169965,
      "loss": 0.5238,
      "step": 1660
    },
    {
      "epoch": 1.4365591397849462,
      "grad_norm": 0.23239320809561437,
      "learning_rate": 0.0001245332196783817,
      "loss": 0.5156,
      "step": 1670
    },
    {
      "epoch": 1.4451612903225808,
      "grad_norm": 0.23180542434985962,
      "learning_rate": 0.00012356114730494123,
      "loss": 0.5184,
      "step": 1680
    },
    {
      "epoch": 1.453763440860215,
      "grad_norm": 0.23588148083202282,
      "learning_rate": 0.00012258671193263716,
      "loss": 0.5155,
      "step": 1690
    },
    {
      "epoch": 1.4623655913978495,
      "grad_norm": 0.23435001337205247,
      "learning_rate": 0.00012161001128971879,
      "loss": 0.5293,
      "step": 1700
    },
    {
      "epoch": 1.4709677419354839,
      "grad_norm": 0.22453229677185074,
      "learning_rate": 0.00012063114333162438,
      "loss": 0.5151,
      "step": 1710
    },
    {
      "epoch": 1.4795698924731182,
      "grad_norm": 0.2513082837299489,
      "learning_rate": 0.00011965020623115688,
      "loss": 0.5416,
      "step": 1720
    },
    {
      "epoch": 1.4881720430107528,
      "grad_norm": 0.23485488435380322,
      "learning_rate": 0.0001186672983686381,
      "loss": 0.5114,
      "step": 1730
    },
    {
      "epoch": 1.4967741935483871,
      "grad_norm": 0.2339236059203922,
      "learning_rate": 0.00011768251832204187,
      "loss": 0.5189,
      "step": 1740
    },
    {
      "epoch": 1.5053763440860215,
      "grad_norm": 0.2363738308894046,
      "learning_rate": 0.00011669596485710741,
      "loss": 0.5236,
      "step": 1750
    },
    {
      "epoch": 1.513978494623656,
      "grad_norm": 0.23458902400188686,
      "learning_rate": 0.00011570773691743397,
      "loss": 0.5284,
      "step": 1760
    },
    {
      "epoch": 1.5225806451612902,
      "grad_norm": 0.23836545061013703,
      "learning_rate": 0.0001147179336145575,
      "loss": 0.5121,
      "step": 1770
    },
    {
      "epoch": 1.5311827956989248,
      "grad_norm": 0.23790373487214236,
      "learning_rate": 0.00011372665421801056,
      "loss": 0.5165,
      "step": 1780
    },
    {
      "epoch": 1.5397849462365591,
      "grad_norm": 0.23929874324218836,
      "learning_rate": 0.0001127339981453664,
      "loss": 0.5257,
      "step": 1790
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 0.23994815774490452,
      "learning_rate": 0.00011174006495226812,
      "loss": 0.5125,
      "step": 1800
    },
    {
      "epoch": 1.556989247311828,
      "grad_norm": 0.22986043036890028,
      "learning_rate": 0.00011074495432244397,
      "loss": 0.5142,
      "step": 1810
    },
    {
      "epoch": 1.5655913978494622,
      "grad_norm": 0.2276160657387438,
      "learning_rate": 0.0001097487660577099,
      "loss": 0.5198,
      "step": 1820
    },
    {
      "epoch": 1.5741935483870968,
      "grad_norm": 0.23590750768735258,
      "learning_rate": 0.00010875160006796024,
      "loss": 0.5203,
      "step": 1830
    },
    {
      "epoch": 1.5827956989247312,
      "grad_norm": 0.24238494113010198,
      "learning_rate": 0.00010775355636114755,
      "loss": 0.5288,
      "step": 1840
    },
    {
      "epoch": 1.5913978494623655,
      "grad_norm": 0.23079601762605023,
      "learning_rate": 0.00010675473503325245,
      "loss": 0.516,
      "step": 1850
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.23473655518093983,
      "learning_rate": 0.00010575523625824488,
      "loss": 0.5245,
      "step": 1860
    },
    {
      "epoch": 1.6086021505376344,
      "grad_norm": 0.2297499982621768,
      "learning_rate": 0.00010475516027803751,
      "loss": 0.5162,
      "step": 1870
    },
    {
      "epoch": 1.6172043010752688,
      "grad_norm": 0.23292165198339548,
      "learning_rate": 0.00010375460739243215,
      "loss": 0.5246,
      "step": 1880
    },
    {
      "epoch": 1.6258064516129034,
      "grad_norm": 0.24340787369255004,
      "learning_rate": 0.00010275367794906044,
      "loss": 0.5099,
      "step": 1890
    },
    {
      "epoch": 1.6344086021505375,
      "grad_norm": 0.24374482359627925,
      "learning_rate": 0.00010175247233331989,
      "loss": 0.5097,
      "step": 1900
    },
    {
      "epoch": 1.643010752688172,
      "grad_norm": 0.23616305839556445,
      "learning_rate": 0.00010075109095830584,
      "loss": 0.5231,
      "step": 1910
    },
    {
      "epoch": 1.6516129032258065,
      "grad_norm": 0.23231972508384582,
      "learning_rate": 9.974963425474106e-05,
      "loss": 0.5213,
      "step": 1920
    },
    {
      "epoch": 1.6602150537634408,
      "grad_norm": 0.24023523508198666,
      "learning_rate": 9.874820266090303e-05,
      "loss": 0.5145,
      "step": 1930
    },
    {
      "epoch": 1.6688172043010754,
      "grad_norm": 0.24186555744758984,
      "learning_rate": 9.774689661255106e-05,
      "loss": 0.5256,
      "step": 1940
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 0.2372332084270041,
      "learning_rate": 9.67458165328531e-05,
      "loss": 0.5094,
      "step": 1950
    },
    {
      "epoch": 1.686021505376344,
      "grad_norm": 0.2350639603238352,
      "learning_rate": 9.574506282231433e-05,
      "loss": 0.5119,
      "step": 1960
    },
    {
      "epoch": 1.6946236559139785,
      "grad_norm": 0.23559422057113213,
      "learning_rate": 9.474473584870757e-05,
      "loss": 0.5155,
      "step": 1970
    },
    {
      "epoch": 1.7032258064516128,
      "grad_norm": 0.2336022634685686,
      "learning_rate": 9.374493593700723e-05,
      "loss": 0.5113,
      "step": 1980
    },
    {
      "epoch": 1.7118279569892474,
      "grad_norm": 0.23539681381001779,
      "learning_rate": 9.274576335932767e-05,
      "loss": 0.5189,
      "step": 1990
    },
    {
      "epoch": 1.7204301075268817,
      "grad_norm": 0.23650024951926094,
      "learning_rate": 9.174731832486648e-05,
      "loss": 0.5082,
      "step": 2000
    },
    {
      "epoch": 1.729032258064516,
      "grad_norm": 0.23916533165546086,
      "learning_rate": 9.074970096985427e-05,
      "loss": 0.5284,
      "step": 2010
    },
    {
      "epoch": 1.7376344086021507,
      "grad_norm": 0.23608187656307292,
      "learning_rate": 8.975301134751202e-05,
      "loss": 0.5172,
      "step": 2020
    },
    {
      "epoch": 1.7462365591397848,
      "grad_norm": 0.23344322072994478,
      "learning_rate": 8.87573494180163e-05,
      "loss": 0.511,
      "step": 2030
    },
    {
      "epoch": 1.7548387096774194,
      "grad_norm": 0.24030220361622895,
      "learning_rate": 8.77628150384741e-05,
      "loss": 0.5254,
      "step": 2040
    },
    {
      "epoch": 1.7634408602150538,
      "grad_norm": 0.23404925065530963,
      "learning_rate": 8.676950795290802e-05,
      "loss": 0.5189,
      "step": 2050
    },
    {
      "epoch": 1.772043010752688,
      "grad_norm": 0.23053178874827976,
      "learning_rate": 8.57775277822526e-05,
      "loss": 0.5252,
      "step": 2060
    },
    {
      "epoch": 1.7806451612903227,
      "grad_norm": 0.23781516582822665,
      "learning_rate": 8.478697401436323e-05,
      "loss": 0.5146,
      "step": 2070
    },
    {
      "epoch": 1.789247311827957,
      "grad_norm": 0.2276955393985772,
      "learning_rate": 8.379794599403836e-05,
      "loss": 0.5169,
      "step": 2080
    },
    {
      "epoch": 1.7978494623655914,
      "grad_norm": 0.23946966468353473,
      "learning_rate": 8.281054291305566e-05,
      "loss": 0.5181,
      "step": 2090
    },
    {
      "epoch": 1.8064516129032258,
      "grad_norm": 0.22844184267941248,
      "learning_rate": 8.182486380022426e-05,
      "loss": 0.5009,
      "step": 2100
    },
    {
      "epoch": 1.8150537634408601,
      "grad_norm": 0.2386146449957716,
      "learning_rate": 8.084100751145277e-05,
      "loss": 0.5167,
      "step": 2110
    },
    {
      "epoch": 1.8236559139784947,
      "grad_norm": 0.22996861067382443,
      "learning_rate": 7.985907271983467e-05,
      "loss": 0.5172,
      "step": 2120
    },
    {
      "epoch": 1.832258064516129,
      "grad_norm": 0.23122069068423765,
      "learning_rate": 7.887915790575241e-05,
      "loss": 0.512,
      "step": 2130
    },
    {
      "epoch": 1.8408602150537634,
      "grad_norm": 0.226577342689351,
      "learning_rate": 7.790136134700042e-05,
      "loss": 0.5181,
      "step": 2140
    },
    {
      "epoch": 1.849462365591398,
      "grad_norm": 0.24205636496242264,
      "learning_rate": 7.692578110892876e-05,
      "loss": 0.5206,
      "step": 2150
    },
    {
      "epoch": 1.8580645161290321,
      "grad_norm": 0.24349873078348327,
      "learning_rate": 7.595251503460778e-05,
      "loss": 0.5226,
      "step": 2160
    },
    {
      "epoch": 1.8666666666666667,
      "grad_norm": 0.23758597762780134,
      "learning_rate": 7.498166073501529e-05,
      "loss": 0.5244,
      "step": 2170
    },
    {
      "epoch": 1.875268817204301,
      "grad_norm": 0.2284958254909026,
      "learning_rate": 7.401331557924707e-05,
      "loss": 0.505,
      "step": 2180
    },
    {
      "epoch": 1.8838709677419354,
      "grad_norm": 0.24598875919420438,
      "learning_rate": 7.304757668475122e-05,
      "loss": 0.5317,
      "step": 2190
    },
    {
      "epoch": 1.89247311827957,
      "grad_norm": 0.24113280579774263,
      "learning_rate": 7.208454090758832e-05,
      "loss": 0.5178,
      "step": 2200
    },
    {
      "epoch": 1.9010752688172043,
      "grad_norm": 0.2362286661628526,
      "learning_rate": 7.112430483271746e-05,
      "loss": 0.5202,
      "step": 2210
    },
    {
      "epoch": 1.9096774193548387,
      "grad_norm": 0.23413327678165968,
      "learning_rate": 7.016696476430931e-05,
      "loss": 0.4944,
      "step": 2220
    },
    {
      "epoch": 1.9182795698924733,
      "grad_norm": 0.23501629015109748,
      "learning_rate": 6.921261671608791e-05,
      "loss": 0.5153,
      "step": 2230
    },
    {
      "epoch": 1.9268817204301074,
      "grad_norm": 0.27066815871872996,
      "learning_rate": 6.826135640170101e-05,
      "loss": 0.5252,
      "step": 2240
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 0.2439503871040344,
      "learning_rate": 6.731327922512074e-05,
      "loss": 0.5112,
      "step": 2250
    },
    {
      "epoch": 1.9440860215053763,
      "grad_norm": 0.23161029568643865,
      "learning_rate": 6.636848027107544e-05,
      "loss": 0.5307,
      "step": 2260
    },
    {
      "epoch": 1.9526881720430107,
      "grad_norm": 0.24210935717094484,
      "learning_rate": 6.54270542955134e-05,
      "loss": 0.5011,
      "step": 2270
    },
    {
      "epoch": 1.9612903225806453,
      "grad_norm": 0.24063211639329316,
      "learning_rate": 6.44890957160994e-05,
      "loss": 0.5234,
      "step": 2280
    },
    {
      "epoch": 1.9698924731182794,
      "grad_norm": 0.23615678793407338,
      "learning_rate": 6.355469860274574e-05,
      "loss": 0.5046,
      "step": 2290
    },
    {
      "epoch": 1.978494623655914,
      "grad_norm": 0.22842985809994715,
      "learning_rate": 6.262395666817724e-05,
      "loss": 0.5012,
      "step": 2300
    },
    {
      "epoch": 1.9870967741935484,
      "grad_norm": 0.23784853454359128,
      "learning_rate": 6.169696325853312e-05,
      "loss": 0.519,
      "step": 2310
    },
    {
      "epoch": 1.9956989247311827,
      "grad_norm": 0.2342469521451372,
      "learning_rate": 6.077381134400462e-05,
      "loss": 0.5002,
      "step": 2320
    },
    {
      "epoch": 2.0043010752688173,
      "grad_norm": 0.22833969780513486,
      "learning_rate": 5.985459350951121e-05,
      "loss": 0.4845,
      "step": 2330
    },
    {
      "epoch": 2.0129032258064514,
      "grad_norm": 0.2544744796985262,
      "learning_rate": 5.893940194541492e-05,
      "loss": 0.464,
      "step": 2340
    },
    {
      "epoch": 2.021505376344086,
      "grad_norm": 0.24382320744489763,
      "learning_rate": 5.802832843827419e-05,
      "loss": 0.455,
      "step": 2350
    },
    {
      "epoch": 2.0301075268817206,
      "grad_norm": 0.24697875997895144,
      "learning_rate": 5.712146436163863e-05,
      "loss": 0.4598,
      "step": 2360
    },
    {
      "epoch": 2.0387096774193547,
      "grad_norm": 0.2513303656347981,
      "learning_rate": 5.6218900666884975e-05,
      "loss": 0.4515,
      "step": 2370
    },
    {
      "epoch": 2.0473118279569893,
      "grad_norm": 0.26236895102262536,
      "learning_rate": 5.5320727874095014e-05,
      "loss": 0.4598,
      "step": 2380
    },
    {
      "epoch": 2.055913978494624,
      "grad_norm": 0.25978280232463796,
      "learning_rate": 5.4427036062977744e-05,
      "loss": 0.451,
      "step": 2390
    },
    {
      "epoch": 2.064516129032258,
      "grad_norm": 0.26162376545024385,
      "learning_rate": 5.3537914863834374e-05,
      "loss": 0.4563,
      "step": 2400
    },
    {
      "epoch": 2.0731182795698926,
      "grad_norm": 0.27200603174186766,
      "learning_rate": 5.265345344856979e-05,
      "loss": 0.4595,
      "step": 2410
    },
    {
      "epoch": 2.0817204301075267,
      "grad_norm": 0.2516133186216482,
      "learning_rate": 5.1773740521748793e-05,
      "loss": 0.4543,
      "step": 2420
    },
    {
      "epoch": 2.0903225806451613,
      "grad_norm": 0.2559266512535566,
      "learning_rate": 5.089886431169999e-05,
      "loss": 0.4576,
      "step": 2430
    },
    {
      "epoch": 2.098924731182796,
      "grad_norm": 0.26643890557668787,
      "learning_rate": 5.0028912561667104e-05,
      "loss": 0.4651,
      "step": 2440
    },
    {
      "epoch": 2.10752688172043,
      "grad_norm": 0.2594035563202495,
      "learning_rate": 4.916397252100892e-05,
      "loss": 0.4581,
      "step": 2450
    },
    {
      "epoch": 2.1161290322580646,
      "grad_norm": 0.2589087763537241,
      "learning_rate": 4.830413093644913e-05,
      "loss": 0.4508,
      "step": 2460
    },
    {
      "epoch": 2.1247311827956987,
      "grad_norm": 0.25564217019549307,
      "learning_rate": 4.744947404337605e-05,
      "loss": 0.4498,
      "step": 2470
    },
    {
      "epoch": 2.1333333333333333,
      "grad_norm": 0.25846858564022296,
      "learning_rate": 4.660008755719397e-05,
      "loss": 0.4439,
      "step": 2480
    },
    {
      "epoch": 2.141935483870968,
      "grad_norm": 0.2615309859057683,
      "learning_rate": 4.5756056664726554e-05,
      "loss": 0.4621,
      "step": 2490
    },
    {
      "epoch": 2.150537634408602,
      "grad_norm": 0.2751137583558904,
      "learning_rate": 4.491746601567343e-05,
      "loss": 0.4542,
      "step": 2500
    },
    {
      "epoch": 2.1591397849462366,
      "grad_norm": 0.2640093266145403,
      "learning_rate": 4.408439971412013e-05,
      "loss": 0.4709,
      "step": 2510
    },
    {
      "epoch": 2.167741935483871,
      "grad_norm": 0.26804533689921306,
      "learning_rate": 4.325694131010346e-05,
      "loss": 0.4545,
      "step": 2520
    },
    {
      "epoch": 2.1763440860215053,
      "grad_norm": 0.26953837584010415,
      "learning_rate": 4.243517379123193e-05,
      "loss": 0.447,
      "step": 2530
    },
    {
      "epoch": 2.18494623655914,
      "grad_norm": 0.26331494847521747,
      "learning_rate": 4.161917957436271e-05,
      "loss": 0.4454,
      "step": 2540
    },
    {
      "epoch": 2.193548387096774,
      "grad_norm": 0.26650178799391877,
      "learning_rate": 4.080904049733607e-05,
      "loss": 0.4452,
      "step": 2550
    },
    {
      "epoch": 2.2021505376344086,
      "grad_norm": 0.2596397199813554,
      "learning_rate": 4.0004837810767294e-05,
      "loss": 0.455,
      "step": 2560
    },
    {
      "epoch": 2.210752688172043,
      "grad_norm": 0.2855359315189352,
      "learning_rate": 3.9206652169898364e-05,
      "loss": 0.4619,
      "step": 2570
    },
    {
      "epoch": 2.2193548387096773,
      "grad_norm": 0.2715169650377671,
      "learning_rate": 3.841456362650837e-05,
      "loss": 0.4615,
      "step": 2580
    },
    {
      "epoch": 2.227956989247312,
      "grad_norm": 0.2628755048951637,
      "learning_rate": 3.7628651620885444e-05,
      "loss": 0.4645,
      "step": 2590
    },
    {
      "epoch": 2.236559139784946,
      "grad_norm": 0.28592361434576347,
      "learning_rate": 3.6848994973859105e-05,
      "loss": 0.4692,
      "step": 2600
    },
    {
      "epoch": 2.2451612903225806,
      "grad_norm": 0.26764492546621405,
      "learning_rate": 3.607567187889538e-05,
      "loss": 0.4563,
      "step": 2610
    },
    {
      "epoch": 2.253763440860215,
      "grad_norm": 0.271421407542708,
      "learning_rate": 3.5308759894254496e-05,
      "loss": 0.4671,
      "step": 2620
    },
    {
      "epoch": 2.2623655913978493,
      "grad_norm": 0.2653346076557134,
      "learning_rate": 3.45483359352125e-05,
      "loss": 0.4535,
      "step": 2630
    },
    {
      "epoch": 2.270967741935484,
      "grad_norm": 0.2672124362703016,
      "learning_rate": 3.379447626634712e-05,
      "loss": 0.4523,
      "step": 2640
    },
    {
      "epoch": 2.279569892473118,
      "grad_norm": 0.2815570906050588,
      "learning_rate": 3.304725649388919e-05,
      "loss": 0.468,
      "step": 2650
    },
    {
      "epoch": 2.2881720430107526,
      "grad_norm": 0.27533793807613155,
      "learning_rate": 3.230675155813979e-05,
      "loss": 0.4587,
      "step": 2660
    },
    {
      "epoch": 2.296774193548387,
      "grad_norm": 0.2718010567194757,
      "learning_rate": 3.1573035725954344e-05,
      "loss": 0.4518,
      "step": 2670
    },
    {
      "epoch": 2.3053763440860213,
      "grad_norm": 0.26881948505806064,
      "learning_rate": 3.084618258329443e-05,
      "loss": 0.4511,
      "step": 2680
    },
    {
      "epoch": 2.313978494623656,
      "grad_norm": 0.2735905144552066,
      "learning_rate": 3.012626502784729e-05,
      "loss": 0.4437,
      "step": 2690
    },
    {
      "epoch": 2.3225806451612905,
      "grad_norm": 0.2753569674239171,
      "learning_rate": 2.9413355261715192e-05,
      "loss": 0.457,
      "step": 2700
    },
    {
      "epoch": 2.3311827956989246,
      "grad_norm": 0.27264140236415646,
      "learning_rate": 2.87075247841738e-05,
      "loss": 0.4562,
      "step": 2710
    },
    {
      "epoch": 2.339784946236559,
      "grad_norm": 0.2750362048193341,
      "learning_rate": 2.8008844384501566e-05,
      "loss": 0.4578,
      "step": 2720
    },
    {
      "epoch": 2.3483870967741938,
      "grad_norm": 0.2743768587791329,
      "learning_rate": 2.7317384134879965e-05,
      "loss": 0.4569,
      "step": 2730
    },
    {
      "epoch": 2.356989247311828,
      "grad_norm": 0.27994254494622967,
      "learning_rate": 2.6633213383365906e-05,
      "loss": 0.4554,
      "step": 2740
    },
    {
      "epoch": 2.3655913978494625,
      "grad_norm": 0.273412382006339,
      "learning_rate": 2.595640074693664e-05,
      "loss": 0.4521,
      "step": 2750
    },
    {
      "epoch": 2.3741935483870966,
      "grad_norm": 0.26693176903025,
      "learning_rate": 2.5287014104607975e-05,
      "loss": 0.4472,
      "step": 2760
    },
    {
      "epoch": 2.382795698924731,
      "grad_norm": 0.26563176753886897,
      "learning_rate": 2.4625120590626595e-05,
      "loss": 0.4487,
      "step": 2770
    },
    {
      "epoch": 2.3913978494623658,
      "grad_norm": 0.27008804180981577,
      "learning_rate": 2.397078658773699e-05,
      "loss": 0.4611,
      "step": 2780
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.2624172113992881,
      "learning_rate": 2.3324077720523785e-05,
      "loss": 0.4461,
      "step": 2790
    },
    {
      "epoch": 2.4086021505376345,
      "grad_norm": 0.27677007427727796,
      "learning_rate": 2.2685058848830076e-05,
      "loss": 0.4536,
      "step": 2800
    },
    {
      "epoch": 2.4172043010752686,
      "grad_norm": 0.27569949386731046,
      "learning_rate": 2.2053794061252675e-05,
      "loss": 0.4474,
      "step": 2810
    },
    {
      "epoch": 2.425806451612903,
      "grad_norm": 0.267987384900051,
      "learning_rate": 2.1430346668714175e-05,
      "loss": 0.4487,
      "step": 2820
    },
    {
      "epoch": 2.434408602150538,
      "grad_norm": 0.2669869480120869,
      "learning_rate": 2.0814779198113687e-05,
      "loss": 0.455,
      "step": 2830
    },
    {
      "epoch": 2.443010752688172,
      "grad_norm": 0.26354591329030985,
      "learning_rate": 2.020715338605581e-05,
      "loss": 0.4616,
      "step": 2840
    },
    {
      "epoch": 2.4516129032258065,
      "grad_norm": 0.2785720489510887,
      "learning_rate": 1.9607530172658715e-05,
      "loss": 0.4419,
      "step": 2850
    },
    {
      "epoch": 2.4602150537634406,
      "grad_norm": 0.27851326696395934,
      "learning_rate": 1.9015969695442704e-05,
      "loss": 0.4572,
      "step": 2860
    },
    {
      "epoch": 2.468817204301075,
      "grad_norm": 0.27236418612561275,
      "learning_rate": 1.8432531283298458e-05,
      "loss": 0.453,
      "step": 2870
    },
    {
      "epoch": 2.47741935483871,
      "grad_norm": 0.2816067593156165,
      "learning_rate": 1.7857273450537227e-05,
      "loss": 0.4607,
      "step": 2880
    },
    {
      "epoch": 2.486021505376344,
      "grad_norm": 0.27424768533392857,
      "learning_rate": 1.7290253891022e-05,
      "loss": 0.4385,
      "step": 2890
    },
    {
      "epoch": 2.4946236559139785,
      "grad_norm": 0.2766562755277204,
      "learning_rate": 1.673152947238139e-05,
      "loss": 0.4567,
      "step": 2900
    },
    {
      "epoch": 2.5032258064516126,
      "grad_norm": 0.2614813998070769,
      "learning_rate": 1.618115623030625e-05,
      "loss": 0.4428,
      "step": 2910
    },
    {
      "epoch": 2.511827956989247,
      "grad_norm": 0.272212168117098,
      "learning_rate": 1.5639189362929695e-05,
      "loss": 0.453,
      "step": 2920
    },
    {
      "epoch": 2.520430107526882,
      "grad_norm": 0.2747575768340182,
      "learning_rate": 1.5105683225291211e-05,
      "loss": 0.4586,
      "step": 2930
    },
    {
      "epoch": 2.5290322580645164,
      "grad_norm": 0.2711344401379175,
      "learning_rate": 1.4580691323885209e-05,
      "loss": 0.4506,
      "step": 2940
    },
    {
      "epoch": 2.5376344086021505,
      "grad_norm": 0.27821711929840565,
      "learning_rate": 1.4064266311294793e-05,
      "loss": 0.4669,
      "step": 2950
    },
    {
      "epoch": 2.546236559139785,
      "grad_norm": 0.2807569627250837,
      "learning_rate": 1.3556459980911085e-05,
      "loss": 0.4567,
      "step": 2960
    },
    {
      "epoch": 2.554838709677419,
      "grad_norm": 0.27478294823892596,
      "learning_rate": 1.305732326173882e-05,
      "loss": 0.4629,
      "step": 2970
    },
    {
      "epoch": 2.563440860215054,
      "grad_norm": 0.2694458161205595,
      "learning_rate": 1.2566906213288388e-05,
      "loss": 0.4518,
      "step": 2980
    },
    {
      "epoch": 2.5720430107526884,
      "grad_norm": 0.26379184313364523,
      "learning_rate": 1.2085258020555556e-05,
      "loss": 0.4489,
      "step": 2990
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 0.27978762666210216,
      "learning_rate": 1.1612426989088232e-05,
      "loss": 0.4493,
      "step": 3000
    },
    {
      "epoch": 2.589247311827957,
      "grad_norm": 0.28005079300575625,
      "learning_rate": 1.1148460540142125e-05,
      "loss": 0.4501,
      "step": 3010
    },
    {
      "epoch": 2.5978494623655912,
      "grad_norm": 0.2765800997942907,
      "learning_rate": 1.0693405205924579e-05,
      "loss": 0.4599,
      "step": 3020
    },
    {
      "epoch": 2.606451612903226,
      "grad_norm": 0.26856411845690764,
      "learning_rate": 1.0247306624927789e-05,
      "loss": 0.4489,
      "step": 3030
    },
    {
      "epoch": 2.6150537634408604,
      "grad_norm": 0.2758438510594412,
      "learning_rate": 9.810209537351645e-06,
      "loss": 0.4427,
      "step": 3040
    },
    {
      "epoch": 2.6236559139784945,
      "grad_norm": 0.2808673768423207,
      "learning_rate": 9.382157780616606e-06,
      "loss": 0.4369,
      "step": 3050
    },
    {
      "epoch": 2.632258064516129,
      "grad_norm": 0.26355013415952777,
      "learning_rate": 8.963194284967202e-06,
      "loss": 0.4543,
      "step": 3060
    },
    {
      "epoch": 2.6408602150537632,
      "grad_norm": 0.26682463219677827,
      "learning_rate": 8.553361069166388e-06,
      "loss": 0.4448,
      "step": 3070
    },
    {
      "epoch": 2.649462365591398,
      "grad_norm": 0.2824919005760602,
      "learning_rate": 8.15269923628147e-06,
      "loss": 0.463,
      "step": 3080
    },
    {
      "epoch": 2.6580645161290324,
      "grad_norm": 0.2606472077942361,
      "learning_rate": 7.761248969561729e-06,
      "loss": 0.4435,
      "step": 3090
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.2716672911173004,
      "learning_rate": 7.379049528408433e-06,
      "loss": 0.4436,
      "step": 3100
    },
    {
      "epoch": 2.675268817204301,
      "grad_norm": 0.2692189798747163,
      "learning_rate": 7.00613924443726e-06,
      "loss": 0.456,
      "step": 3110
    },
    {
      "epoch": 2.6838709677419352,
      "grad_norm": 0.2751219149255623,
      "learning_rate": 6.642555517634197e-06,
      "loss": 0.4486,
      "step": 3120
    },
    {
      "epoch": 2.69247311827957,
      "grad_norm": 0.26558006981794996,
      "learning_rate": 6.288334812604324e-06,
      "loss": 0.448,
      "step": 3130
    },
    {
      "epoch": 2.7010752688172044,
      "grad_norm": 0.2859552474881587,
      "learning_rate": 5.943512654914951e-06,
      "loss": 0.4357,
      "step": 3140
    },
    {
      "epoch": 2.709677419354839,
      "grad_norm": 0.27548323875619696,
      "learning_rate": 5.6081236275325355e-06,
      "loss": 0.4624,
      "step": 3150
    },
    {
      "epoch": 2.718279569892473,
      "grad_norm": 0.2806542534666262,
      "learning_rate": 5.282201367354245e-06,
      "loss": 0.4505,
      "step": 3160
    },
    {
      "epoch": 2.7268817204301077,
      "grad_norm": 0.26475293341781525,
      "learning_rate": 4.965778561834644e-06,
      "loss": 0.4386,
      "step": 3170
    },
    {
      "epoch": 2.735483870967742,
      "grad_norm": 0.28440947626379526,
      "learning_rate": 4.658886945707164e-06,
      "loss": 0.4542,
      "step": 3180
    },
    {
      "epoch": 2.7440860215053764,
      "grad_norm": 0.2696410246213196,
      "learning_rate": 4.361557297801499e-06,
      "loss": 0.4416,
      "step": 3190
    },
    {
      "epoch": 2.752688172043011,
      "grad_norm": 0.27708040093077324,
      "learning_rate": 4.073819437956694e-06,
      "loss": 0.4471,
      "step": 3200
    },
    {
      "epoch": 2.761290322580645,
      "grad_norm": 0.2615573424318419,
      "learning_rate": 3.7957022240304173e-06,
      "loss": 0.4505,
      "step": 3210
    },
    {
      "epoch": 2.7698924731182797,
      "grad_norm": 0.2776744589099636,
      "learning_rate": 3.5272335490047937e-06,
      "loss": 0.4416,
      "step": 3220
    },
    {
      "epoch": 2.778494623655914,
      "grad_norm": 0.28362883428775265,
      "learning_rate": 3.2684403381889272e-06,
      "loss": 0.4562,
      "step": 3230
    },
    {
      "epoch": 2.7870967741935484,
      "grad_norm": 0.2659089658104915,
      "learning_rate": 3.019348546518508e-06,
      "loss": 0.4542,
      "step": 3240
    },
    {
      "epoch": 2.795698924731183,
      "grad_norm": 0.27817617897785124,
      "learning_rate": 2.7799831559527258e-06,
      "loss": 0.4535,
      "step": 3250
    },
    {
      "epoch": 2.804301075268817,
      "grad_norm": 0.2767257855534767,
      "learning_rate": 2.550368172968809e-06,
      "loss": 0.4438,
      "step": 3260
    },
    {
      "epoch": 2.8129032258064517,
      "grad_norm": 0.2659520383981123,
      "learning_rate": 2.3305266261542945e-06,
      "loss": 0.4648,
      "step": 3270
    },
    {
      "epoch": 2.821505376344086,
      "grad_norm": 0.26860959961537617,
      "learning_rate": 2.1204805638975646e-06,
      "loss": 0.4478,
      "step": 3280
    },
    {
      "epoch": 2.8301075268817204,
      "grad_norm": 0.2772290842479072,
      "learning_rate": 1.9202510521763696e-06,
      "loss": 0.4556,
      "step": 3290
    },
    {
      "epoch": 2.838709677419355,
      "grad_norm": 0.2773729267266774,
      "learning_rate": 1.7298581724452978e-06,
      "loss": 0.4546,
      "step": 3300
    },
    {
      "epoch": 2.847311827956989,
      "grad_norm": 0.288243164539903,
      "learning_rate": 1.5493210196216079e-06,
      "loss": 0.457,
      "step": 3310
    },
    {
      "epoch": 2.8559139784946237,
      "grad_norm": 0.27792696429990915,
      "learning_rate": 1.378657700170205e-06,
      "loss": 0.4585,
      "step": 3320
    },
    {
      "epoch": 2.864516129032258,
      "grad_norm": 0.27882422779333554,
      "learning_rate": 1.2178853302877159e-06,
      "loss": 0.4486,
      "step": 3330
    },
    {
      "epoch": 2.8731182795698924,
      "grad_norm": 0.28133695329057534,
      "learning_rate": 1.0670200341858394e-06,
      "loss": 0.4502,
      "step": 3340
    },
    {
      "epoch": 2.881720430107527,
      "grad_norm": 0.2859131700306455,
      "learning_rate": 9.260769424742633e-07,
      "loss": 0.4452,
      "step": 3350
    },
    {
      "epoch": 2.8903225806451616,
      "grad_norm": 0.26707708683830167,
      "learning_rate": 7.950701906431324e-07,
      "loss": 0.4498,
      "step": 3360
    },
    {
      "epoch": 2.8989247311827957,
      "grad_norm": 0.2716338526967156,
      "learning_rate": 6.740129176453725e-07,
      "loss": 0.4568,
      "step": 3370
    },
    {
      "epoch": 2.90752688172043,
      "grad_norm": 0.29044410591630965,
      "learning_rate": 5.629172645789882e-07,
      "loss": 0.4517,
      "step": 3380
    },
    {
      "epoch": 2.9161290322580644,
      "grad_norm": 0.27054199049752875,
      "learning_rate": 4.617943734694152e-07,
      "loss": 0.4559,
      "step": 3390
    },
    {
      "epoch": 2.924731182795699,
      "grad_norm": 0.27515263341966606,
      "learning_rate": 3.7065438615198066e-07,
      "loss": 0.4576,
      "step": 3400
    },
    {
      "epoch": 2.9333333333333336,
      "grad_norm": 0.2653013119464242,
      "learning_rate": 2.8950644325485e-07,
      "loss": 0.449,
      "step": 3410
    },
    {
      "epoch": 2.9419354838709677,
      "grad_norm": 0.2791971697586357,
      "learning_rate": 2.183586832822493e-07,
      "loss": 0.4562,
      "step": 3420
    },
    {
      "epoch": 2.9505376344086023,
      "grad_norm": 0.2668878384341789,
      "learning_rate": 1.572182417982515e-07,
      "loss": 0.447,
      "step": 3430
    },
    {
      "epoch": 2.9591397849462364,
      "grad_norm": 0.2759015749441583,
      "learning_rate": 1.0609125071109338e-07,
      "loss": 0.4567,
      "step": 3440
    },
    {
      "epoch": 2.967741935483871,
      "grad_norm": 0.283562302360861,
      "learning_rate": 6.49828376582673e-08,
      "loss": 0.4503,
      "step": 3450
    },
    {
      "epoch": 2.9763440860215056,
      "grad_norm": 0.28059419780497596,
      "learning_rate": 3.3897125492188266e-08,
      "loss": 0.4525,
      "step": 3460
    },
    {
      "epoch": 2.9849462365591397,
      "grad_norm": 0.2757143204657877,
      "learning_rate": 1.2837231866746902e-08,
      "loss": 0.4455,
      "step": 3470
    },
    {
      "epoch": 2.9935483870967743,
      "grad_norm": 0.26729797929321086,
      "learning_rate": 1.8052689246150779e-09,
      "loss": 0.4443,
      "step": 3480
    },
    {
      "epoch": 2.9987096774193547,
      "step": 3486,
      "total_flos": 6514990399881216.0,
      "train_loss": 0.5279938033170147,
      "train_runtime": 9440.7969,
      "train_samples_per_second": 5.91,
      "train_steps_per_second": 0.369
    }
  ],
  "logging_steps": 10,
  "max_steps": 3486,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6514990399881216.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}