{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9996544063194273, |
|
"eval_steps": 500, |
|
"global_step": 2531, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003949642063687978, |
|
"grad_norm": 0.7681758999824524, |
|
"learning_rate": 3e-05, |
|
"loss": 3.3209, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.007899284127375956, |
|
"grad_norm": 0.7526655793190002, |
|
"learning_rate": 2.9999953706169412e-05, |
|
"loss": 0.1659, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.011848926191063935, |
|
"grad_norm": 0.573715090751648, |
|
"learning_rate": 2.999981482496339e-05, |
|
"loss": 0.1512, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.015798568254751913, |
|
"grad_norm": 0.602210521697998, |
|
"learning_rate": 2.9999583357239188e-05, |
|
"loss": 0.1438, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01974821031843989, |
|
"grad_norm": 0.6056187748908997, |
|
"learning_rate": 2.9999259304425536e-05, |
|
"loss": 0.1456, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02369785238212787, |
|
"grad_norm": 0.6448855400085449, |
|
"learning_rate": 2.9998842668522657e-05, |
|
"loss": 0.1573, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.027647494445815848, |
|
"grad_norm": 0.5019297003746033, |
|
"learning_rate": 2.9998333452102237e-05, |
|
"loss": 0.1432, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.031597136509503826, |
|
"grad_norm": 0.5146121382713318, |
|
"learning_rate": 2.9997731658307427e-05, |
|
"loss": 0.1414, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03554677857319181, |
|
"grad_norm": 0.5668686628341675, |
|
"learning_rate": 2.99970372908528e-05, |
|
"loss": 0.1406, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03949642063687978, |
|
"grad_norm": 0.5978260040283203, |
|
"learning_rate": 2.9996250354024345e-05, |
|
"loss": 0.1389, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.043446062700567764, |
|
"grad_norm": 0.7445759773254395, |
|
"learning_rate": 2.9995370852679447e-05, |
|
"loss": 0.1468, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04739570476425574, |
|
"grad_norm": 0.6418613791465759, |
|
"learning_rate": 2.9994398792246826e-05, |
|
"loss": 0.1486, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05134534682794372, |
|
"grad_norm": 0.4584648907184601, |
|
"learning_rate": 2.9993334178726546e-05, |
|
"loss": 0.1404, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.055294988891631695, |
|
"grad_norm": 0.5104970932006836, |
|
"learning_rate": 2.9992177018689935e-05, |
|
"loss": 0.1398, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05924463095531968, |
|
"grad_norm": 0.6897855401039124, |
|
"learning_rate": 2.9990927319279584e-05, |
|
"loss": 0.1546, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06319427301900765, |
|
"grad_norm": 0.5376682281494141, |
|
"learning_rate": 2.998958508820927e-05, |
|
"loss": 0.15, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06714391508269563, |
|
"grad_norm": 0.5601758360862732, |
|
"learning_rate": 2.9988150333763933e-05, |
|
"loss": 0.1471, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07109355714638362, |
|
"grad_norm": 0.4657880961894989, |
|
"learning_rate": 2.998662306479961e-05, |
|
"loss": 0.1394, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07504319921007159, |
|
"grad_norm": 0.5285632014274597, |
|
"learning_rate": 2.9985003290743385e-05, |
|
"loss": 0.1452, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.07899284127375956, |
|
"grad_norm": 0.4982571601867676, |
|
"learning_rate": 2.9983291021593326e-05, |
|
"loss": 0.1402, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08294248333744754, |
|
"grad_norm": 0.48557624220848083, |
|
"learning_rate": 2.998148626791844e-05, |
|
"loss": 0.142, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08689212540113553, |
|
"grad_norm": 0.5023711919784546, |
|
"learning_rate": 2.9979589040858586e-05, |
|
"loss": 0.1624, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.0908417674648235, |
|
"grad_norm": 0.47005024552345276, |
|
"learning_rate": 2.9977599352124413e-05, |
|
"loss": 0.155, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.09479140952851148, |
|
"grad_norm": 0.5279797315597534, |
|
"learning_rate": 2.9975517213997302e-05, |
|
"loss": 0.1532, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.09874105159219945, |
|
"grad_norm": 0.43386149406433105, |
|
"learning_rate": 2.9973342639329272e-05, |
|
"loss": 0.1481, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.10269069365588744, |
|
"grad_norm": 0.5564565062522888, |
|
"learning_rate": 2.997107564154291e-05, |
|
"loss": 0.1358, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.10664033571957542, |
|
"grad_norm": 0.6061131358146667, |
|
"learning_rate": 2.996871623463128e-05, |
|
"loss": 0.1464, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.11058997778326339, |
|
"grad_norm": 0.5967995524406433, |
|
"learning_rate": 2.996626443315785e-05, |
|
"loss": 0.1451, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.11453961984695137, |
|
"grad_norm": 0.5291288495063782, |
|
"learning_rate": 2.9963720252256387e-05, |
|
"loss": 0.1436, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.11848926191063935, |
|
"grad_norm": 0.5956757068634033, |
|
"learning_rate": 2.9961083707630877e-05, |
|
"loss": 0.1492, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.12243890397432733, |
|
"grad_norm": 0.5079193711280823, |
|
"learning_rate": 2.9958354815555426e-05, |
|
"loss": 0.1388, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1263885460380153, |
|
"grad_norm": 0.44773226976394653, |
|
"learning_rate": 2.995553359287414e-05, |
|
"loss": 0.1311, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.13033818810170328, |
|
"grad_norm": 0.5200162529945374, |
|
"learning_rate": 2.9952620057001055e-05, |
|
"loss": 0.1401, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.13428783016539125, |
|
"grad_norm": 0.5840851068496704, |
|
"learning_rate": 2.994961422591999e-05, |
|
"loss": 0.1484, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.13823747222907923, |
|
"grad_norm": 0.5336028933525085, |
|
"learning_rate": 2.9946516118184484e-05, |
|
"loss": 0.1298, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.14218711429276723, |
|
"grad_norm": 0.7243465781211853, |
|
"learning_rate": 2.9943325752917633e-05, |
|
"loss": 0.1463, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.1461367563564552, |
|
"grad_norm": 0.5500628352165222, |
|
"learning_rate": 2.9940043149812006e-05, |
|
"loss": 0.1465, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.15008639842014318, |
|
"grad_norm": 0.5050541162490845, |
|
"learning_rate": 2.993666832912949e-05, |
|
"loss": 0.1434, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.15403604048383115, |
|
"grad_norm": 0.5059782266616821, |
|
"learning_rate": 2.9933201311701222e-05, |
|
"loss": 0.1385, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.15798568254751913, |
|
"grad_norm": 0.5439670085906982, |
|
"learning_rate": 2.9929642118927397e-05, |
|
"loss": 0.1421, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1619353246112071, |
|
"grad_norm": 0.5325440168380737, |
|
"learning_rate": 2.992599077277717e-05, |
|
"loss": 0.1482, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.16588496667489508, |
|
"grad_norm": 0.5057934522628784, |
|
"learning_rate": 2.992224729578851e-05, |
|
"loss": 0.1415, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.16983460873858305, |
|
"grad_norm": 0.5029751062393188, |
|
"learning_rate": 2.9918411711068074e-05, |
|
"loss": 0.1517, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.17378425080227106, |
|
"grad_norm": 0.38729503750801086, |
|
"learning_rate": 2.9914484042291053e-05, |
|
"loss": 0.1367, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.17773389286595903, |
|
"grad_norm": 0.4877079725265503, |
|
"learning_rate": 2.991046431370102e-05, |
|
"loss": 0.1446, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.181683534929647, |
|
"grad_norm": 0.48335975408554077, |
|
"learning_rate": 2.9906352550109787e-05, |
|
"loss": 0.1372, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.18563317699333498, |
|
"grad_norm": 0.48935794830322266, |
|
"learning_rate": 2.990214877689727e-05, |
|
"loss": 0.144, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.18958281905702296, |
|
"grad_norm": 0.44793424010276794, |
|
"learning_rate": 2.9897853020011298e-05, |
|
"loss": 0.1298, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.19353246112071093, |
|
"grad_norm": 0.6513413190841675, |
|
"learning_rate": 2.9893465305967483e-05, |
|
"loss": 0.1361, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.1974821031843989, |
|
"grad_norm": 0.5480667948722839, |
|
"learning_rate": 2.9888985661849028e-05, |
|
"loss": 0.1497, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.20143174524808688, |
|
"grad_norm": 0.4912254810333252, |
|
"learning_rate": 2.988441411530659e-05, |
|
"loss": 0.1461, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.20538138731177488, |
|
"grad_norm": 0.4925342798233032, |
|
"learning_rate": 2.987975069455809e-05, |
|
"loss": 0.1418, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.20933102937546286, |
|
"grad_norm": 0.46876174211502075, |
|
"learning_rate": 2.987499542838854e-05, |
|
"loss": 0.1409, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.21328067143915083, |
|
"grad_norm": 0.5577364563941956, |
|
"learning_rate": 2.9870148346149865e-05, |
|
"loss": 0.1423, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.2172303135028388, |
|
"grad_norm": 0.48950615525245667, |
|
"learning_rate": 2.9865209477760746e-05, |
|
"loss": 0.1367, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.22117995556652678, |
|
"grad_norm": 0.4494156241416931, |
|
"learning_rate": 2.9860178853706397e-05, |
|
"loss": 0.1384, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.22512959763021476, |
|
"grad_norm": 0.4439913034439087, |
|
"learning_rate": 2.9855056505038395e-05, |
|
"loss": 0.1447, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.22907923969390273, |
|
"grad_norm": 0.5027551054954529, |
|
"learning_rate": 2.984984246337449e-05, |
|
"loss": 0.1526, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.2330288817575907, |
|
"grad_norm": 0.4503665566444397, |
|
"learning_rate": 2.984453676089842e-05, |
|
"loss": 0.1333, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.2369785238212787, |
|
"grad_norm": 0.5127356052398682, |
|
"learning_rate": 2.9839139430359684e-05, |
|
"loss": 0.1372, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.24092816588496668, |
|
"grad_norm": 0.672027051448822, |
|
"learning_rate": 2.983365050507336e-05, |
|
"loss": 0.1359, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.24487780794865466, |
|
"grad_norm": 0.5182546377182007, |
|
"learning_rate": 2.9828070018919902e-05, |
|
"loss": 0.1504, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.24882745001234263, |
|
"grad_norm": 0.5020663738250732, |
|
"learning_rate": 2.9822398006344923e-05, |
|
"loss": 0.1416, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2527770920760306, |
|
"grad_norm": 0.41956228017807007, |
|
"learning_rate": 2.9816634502358976e-05, |
|
"loss": 0.1306, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.2567267341397186, |
|
"grad_norm": 0.42414599657058716, |
|
"learning_rate": 2.9810779542537357e-05, |
|
"loss": 0.1412, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.26067637620340656, |
|
"grad_norm": 0.5931263566017151, |
|
"learning_rate": 2.9804833163019866e-05, |
|
"loss": 0.1354, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.26462601826709453, |
|
"grad_norm": 0.49056369066238403, |
|
"learning_rate": 2.9798795400510588e-05, |
|
"loss": 0.1313, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2685756603307825, |
|
"grad_norm": 0.5098786950111389, |
|
"learning_rate": 2.9792666292277687e-05, |
|
"loss": 0.1264, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.2725253023944705, |
|
"grad_norm": 0.5509768128395081, |
|
"learning_rate": 2.9786445876153147e-05, |
|
"loss": 0.1418, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.27647494445815846, |
|
"grad_norm": 0.6949456930160522, |
|
"learning_rate": 2.978013419053255e-05, |
|
"loss": 0.1399, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.28042458652184643, |
|
"grad_norm": 0.5233505368232727, |
|
"learning_rate": 2.9773731274374847e-05, |
|
"loss": 0.1415, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.28437422858553446, |
|
"grad_norm": 0.539608895778656, |
|
"learning_rate": 2.9767237167202105e-05, |
|
"loss": 0.1458, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.28832387064922244, |
|
"grad_norm": 0.4299115240573883, |
|
"learning_rate": 2.976065190909927e-05, |
|
"loss": 0.1351, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.2922735127129104, |
|
"grad_norm": 0.46829068660736084, |
|
"learning_rate": 2.975397554071392e-05, |
|
"loss": 0.1349, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.2962231547765984, |
|
"grad_norm": 0.490376353263855, |
|
"learning_rate": 2.9747208103256007e-05, |
|
"loss": 0.1439, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.30017279684028636, |
|
"grad_norm": 0.5873934030532837, |
|
"learning_rate": 2.9740349638497614e-05, |
|
"loss": 0.1395, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.30412243890397433, |
|
"grad_norm": 0.5811406373977661, |
|
"learning_rate": 2.973340018877269e-05, |
|
"loss": 0.1342, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3080720809676623, |
|
"grad_norm": 0.5323910713195801, |
|
"learning_rate": 2.972635979697678e-05, |
|
"loss": 0.1471, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.3120217230313503, |
|
"grad_norm": 0.5084981918334961, |
|
"learning_rate": 2.9719228506566792e-05, |
|
"loss": 0.1296, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.31597136509503826, |
|
"grad_norm": 0.5692681670188904, |
|
"learning_rate": 2.9712006361560685e-05, |
|
"loss": 0.1341, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.31992100715872623, |
|
"grad_norm": 0.525729775428772, |
|
"learning_rate": 2.9704693406537222e-05, |
|
"loss": 0.1454, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.3238706492224142, |
|
"grad_norm": 0.4046003520488739, |
|
"learning_rate": 2.9697289686635703e-05, |
|
"loss": 0.1342, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.3278202912861022, |
|
"grad_norm": 0.47330015897750854, |
|
"learning_rate": 2.968979524755567e-05, |
|
"loss": 0.1417, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.33176993334979016, |
|
"grad_norm": 0.4547816812992096, |
|
"learning_rate": 2.968221013555662e-05, |
|
"loss": 0.1298, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.33571957541347813, |
|
"grad_norm": 0.4318365156650543, |
|
"learning_rate": 2.9674534397457747e-05, |
|
"loss": 0.1454, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.3396692174771661, |
|
"grad_norm": 0.4720039665699005, |
|
"learning_rate": 2.9666768080637622e-05, |
|
"loss": 0.1321, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3436188595408541, |
|
"grad_norm": 0.5289425849914551, |
|
"learning_rate": 2.965891123303392e-05, |
|
"loss": 0.1301, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3475685016045421, |
|
"grad_norm": 0.49078208208084106, |
|
"learning_rate": 2.9650963903143124e-05, |
|
"loss": 0.1452, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3515181436682301, |
|
"grad_norm": 0.7317320704460144, |
|
"learning_rate": 2.9642926140020203e-05, |
|
"loss": 0.1516, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.35546778573191806, |
|
"grad_norm": 0.5247913599014282, |
|
"learning_rate": 2.9634797993278337e-05, |
|
"loss": 0.1408, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.35941742779560604, |
|
"grad_norm": 0.5061410665512085, |
|
"learning_rate": 2.9626579513088606e-05, |
|
"loss": 0.1396, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.363367069859294, |
|
"grad_norm": 0.5871759057044983, |
|
"learning_rate": 2.9618270750179665e-05, |
|
"loss": 0.152, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.367316711922982, |
|
"grad_norm": 0.4584594666957855, |
|
"learning_rate": 2.9609871755837436e-05, |
|
"loss": 0.1274, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.37126635398666996, |
|
"grad_norm": 0.405700147151947, |
|
"learning_rate": 2.9601382581904816e-05, |
|
"loss": 0.1284, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.37521599605035794, |
|
"grad_norm": 0.4593953788280487, |
|
"learning_rate": 2.9592803280781306e-05, |
|
"loss": 0.1359, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.3791656381140459, |
|
"grad_norm": 0.5641497373580933, |
|
"learning_rate": 2.9584133905422744e-05, |
|
"loss": 0.1454, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3831152801777339, |
|
"grad_norm": 0.3449844419956207, |
|
"learning_rate": 2.9575374509340935e-05, |
|
"loss": 0.1385, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.38706492224142186, |
|
"grad_norm": 0.46647313237190247, |
|
"learning_rate": 2.956652514660336e-05, |
|
"loss": 0.1328, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.39101456430510984, |
|
"grad_norm": 0.4442940950393677, |
|
"learning_rate": 2.9557585871832787e-05, |
|
"loss": 0.1379, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.3949642063687978, |
|
"grad_norm": 0.5622376799583435, |
|
"learning_rate": 2.9548556740207e-05, |
|
"loss": 0.1525, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3989138484324858, |
|
"grad_norm": 0.5095304250717163, |
|
"learning_rate": 2.9539437807458404e-05, |
|
"loss": 0.1317, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.40286349049617376, |
|
"grad_norm": 0.36938050389289856, |
|
"learning_rate": 2.9530229129873715e-05, |
|
"loss": 0.1361, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.4068131325598618, |
|
"grad_norm": 0.4959389865398407, |
|
"learning_rate": 2.9520930764293586e-05, |
|
"loss": 0.1475, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.41076277462354976, |
|
"grad_norm": 0.5631204843521118, |
|
"learning_rate": 2.9511542768112284e-05, |
|
"loss": 0.1391, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.41471241668723774, |
|
"grad_norm": 0.4227543771266937, |
|
"learning_rate": 2.9502065199277312e-05, |
|
"loss": 0.1402, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.4186620587509257, |
|
"grad_norm": 0.43038052320480347, |
|
"learning_rate": 2.9492498116289072e-05, |
|
"loss": 0.1239, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.4226117008146137, |
|
"grad_norm": 0.5115047097206116, |
|
"learning_rate": 2.9482841578200486e-05, |
|
"loss": 0.1417, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.42656134287830166, |
|
"grad_norm": 0.4372217059135437, |
|
"learning_rate": 2.9473095644616634e-05, |
|
"loss": 0.139, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.43051098494198964, |
|
"grad_norm": 0.792674720287323, |
|
"learning_rate": 2.94632603756944e-05, |
|
"loss": 0.1355, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.4344606270056776, |
|
"grad_norm": 0.46272650361061096, |
|
"learning_rate": 2.945333583214208e-05, |
|
"loss": 0.1513, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.4384102690693656, |
|
"grad_norm": 0.3727450668811798, |
|
"learning_rate": 2.9443322075219036e-05, |
|
"loss": 0.1317, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.44235991113305356, |
|
"grad_norm": 0.39475393295288086, |
|
"learning_rate": 2.9433219166735285e-05, |
|
"loss": 0.126, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.44630955319674154, |
|
"grad_norm": 0.5749325156211853, |
|
"learning_rate": 2.9423027169051134e-05, |
|
"loss": 0.1509, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.4502591952604295, |
|
"grad_norm": 0.44618451595306396, |
|
"learning_rate": 2.9412746145076804e-05, |
|
"loss": 0.1257, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.4542088373241175, |
|
"grad_norm": 0.46040260791778564, |
|
"learning_rate": 2.9402376158272026e-05, |
|
"loss": 0.1306, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.45815847938780546, |
|
"grad_norm": 0.6470154523849487, |
|
"learning_rate": 2.9391917272645654e-05, |
|
"loss": 0.147, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.46210812145149344, |
|
"grad_norm": 0.4042102098464966, |
|
"learning_rate": 2.9381369552755268e-05, |
|
"loss": 0.1358, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.4660577635151814, |
|
"grad_norm": 0.5040680766105652, |
|
"learning_rate": 2.937073306370679e-05, |
|
"loss": 0.1364, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.47000740557886944, |
|
"grad_norm": 0.44574257731437683, |
|
"learning_rate": 2.936000787115406e-05, |
|
"loss": 0.1468, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.4739570476425574, |
|
"grad_norm": 0.4155598282814026, |
|
"learning_rate": 2.9349194041298437e-05, |
|
"loss": 0.138, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.4779066897062454, |
|
"grad_norm": 0.43807128071784973, |
|
"learning_rate": 2.9338291640888413e-05, |
|
"loss": 0.1376, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.48185633176993337, |
|
"grad_norm": 0.6164836883544922, |
|
"learning_rate": 2.9327300737219164e-05, |
|
"loss": 0.1415, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.48580597383362134, |
|
"grad_norm": 0.4064141511917114, |
|
"learning_rate": 2.9316221398132163e-05, |
|
"loss": 0.1457, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.4897556158973093, |
|
"grad_norm": 0.47821277379989624, |
|
"learning_rate": 2.930505369201475e-05, |
|
"loss": 0.144, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.4937052579609973, |
|
"grad_norm": 0.4229309558868408, |
|
"learning_rate": 2.9293797687799717e-05, |
|
"loss": 0.1286, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.49765490002468526, |
|
"grad_norm": 0.42858126759529114, |
|
"learning_rate": 2.9282453454964856e-05, |
|
"loss": 0.1388, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.5016045420883732, |
|
"grad_norm": 0.47248193621635437, |
|
"learning_rate": 2.9271021063532586e-05, |
|
"loss": 0.1279, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.5055541841520612, |
|
"grad_norm": 0.5147600769996643, |
|
"learning_rate": 2.9259500584069444e-05, |
|
"loss": 0.1281, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.5095038262157492, |
|
"grad_norm": 0.4137686789035797, |
|
"learning_rate": 2.924789208768573e-05, |
|
"loss": 0.1441, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.5134534682794372, |
|
"grad_norm": 0.484967440366745, |
|
"learning_rate": 2.923619564603501e-05, |
|
"loss": 0.1328, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5174031103431251, |
|
"grad_norm": 0.5038776397705078, |
|
"learning_rate": 2.922441133131369e-05, |
|
"loss": 0.1442, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.5213527524068131, |
|
"grad_norm": 0.4918186366558075, |
|
"learning_rate": 2.921253921626058e-05, |
|
"loss": 0.1285, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.5253023944705011, |
|
"grad_norm": 0.447346568107605, |
|
"learning_rate": 2.9200579374156447e-05, |
|
"loss": 0.1261, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.5292520365341891, |
|
"grad_norm": 0.4736550748348236, |
|
"learning_rate": 2.9188531878823532e-05, |
|
"loss": 0.133, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.533201678597877, |
|
"grad_norm": 0.586494505405426, |
|
"learning_rate": 2.9176396804625135e-05, |
|
"loss": 0.1409, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.537151320661565, |
|
"grad_norm": 0.49870672821998596, |
|
"learning_rate": 2.9164174226465134e-05, |
|
"loss": 0.1444, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.541100962725253, |
|
"grad_norm": 0.404547780752182, |
|
"learning_rate": 2.9151864219787522e-05, |
|
"loss": 0.1303, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.545050604788941, |
|
"grad_norm": 0.42132681608200073, |
|
"learning_rate": 2.913946686057595e-05, |
|
"loss": 0.1276, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.5490002468526289, |
|
"grad_norm": 0.4928096830844879, |
|
"learning_rate": 2.9126982225353243e-05, |
|
"loss": 0.1348, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.5529498889163169, |
|
"grad_norm": 0.44450655579566956, |
|
"learning_rate": 2.911441039118095e-05, |
|
"loss": 0.1417, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5568995309800049, |
|
"grad_norm": 0.5710647702217102, |
|
"learning_rate": 2.910175143565886e-05, |
|
"loss": 0.1284, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5608491730436929, |
|
"grad_norm": 0.3675592243671417, |
|
"learning_rate": 2.9089005436924506e-05, |
|
"loss": 0.1505, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.564798815107381, |
|
"grad_norm": 0.4794444441795349, |
|
"learning_rate": 2.90761724736527e-05, |
|
"loss": 0.1325, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.5687484571710689, |
|
"grad_norm": 0.5743889808654785, |
|
"learning_rate": 2.906325262505505e-05, |
|
"loss": 0.1358, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5726980992347569, |
|
"grad_norm": 0.4955087900161743, |
|
"learning_rate": 2.9050245970879455e-05, |
|
"loss": 0.1387, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.5766477412984449, |
|
"grad_norm": 0.42035970091819763, |
|
"learning_rate": 2.9037152591409635e-05, |
|
"loss": 0.1369, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.5805973833621328, |
|
"grad_norm": 0.4199492335319519, |
|
"learning_rate": 2.9023972567464606e-05, |
|
"loss": 0.1461, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.5845470254258208, |
|
"grad_norm": 0.43724125623703003, |
|
"learning_rate": 2.9010705980398217e-05, |
|
"loss": 0.1219, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.5884966674895088, |
|
"grad_norm": 0.39386245608329773, |
|
"learning_rate": 2.8997352912098616e-05, |
|
"loss": 0.1255, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.5924463095531968, |
|
"grad_norm": 0.3640863597393036, |
|
"learning_rate": 2.8983913444987754e-05, |
|
"loss": 0.1273, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.5963959516168847, |
|
"grad_norm": 0.40772178769111633, |
|
"learning_rate": 2.8970387662020898e-05, |
|
"loss": 0.1326, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.6003455936805727, |
|
"grad_norm": 0.4535306990146637, |
|
"learning_rate": 2.895677564668608e-05, |
|
"loss": 0.1273, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.6042952357442607, |
|
"grad_norm": 0.5429089665412903, |
|
"learning_rate": 2.894307748300361e-05, |
|
"loss": 0.1245, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.6082448778079487, |
|
"grad_norm": 0.38951486349105835, |
|
"learning_rate": 2.8929293255525563e-05, |
|
"loss": 0.1437, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.6121945198716366, |
|
"grad_norm": 0.4131280183792114, |
|
"learning_rate": 2.8915423049335214e-05, |
|
"loss": 0.1249, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.6161441619353246, |
|
"grad_norm": 0.44423356652259827, |
|
"learning_rate": 2.890146695004657e-05, |
|
"loss": 0.1315, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.6200938039990126, |
|
"grad_norm": 0.4929848313331604, |
|
"learning_rate": 2.88874250438038e-05, |
|
"loss": 0.1399, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.6240434460627006, |
|
"grad_norm": 0.44524630904197693, |
|
"learning_rate": 2.8873297417280724e-05, |
|
"loss": 0.1304, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.6279930881263885, |
|
"grad_norm": 0.4765247404575348, |
|
"learning_rate": 2.885908415768027e-05, |
|
"loss": 0.1422, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.6319427301900765, |
|
"grad_norm": 0.44227954745292664, |
|
"learning_rate": 2.884478535273393e-05, |
|
"loss": 0.1573, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.6358923722537645, |
|
"grad_norm": 0.4304993152618408, |
|
"learning_rate": 2.8830401090701234e-05, |
|
"loss": 0.1365, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.6398420143174525, |
|
"grad_norm": 0.42231836915016174, |
|
"learning_rate": 2.8815931460369198e-05, |
|
"loss": 0.1328, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.6437916563811404, |
|
"grad_norm": 0.44187867641448975, |
|
"learning_rate": 2.880137655105176e-05, |
|
"loss": 0.1228, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.6477412984448284, |
|
"grad_norm": 0.433136910200119, |
|
"learning_rate": 2.8786736452589265e-05, |
|
"loss": 0.133, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.6516909405085164, |
|
"grad_norm": 0.4308445453643799, |
|
"learning_rate": 2.8772011255347875e-05, |
|
"loss": 0.127, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6556405825722044, |
|
"grad_norm": 0.4352281391620636, |
|
"learning_rate": 2.8757201050219027e-05, |
|
"loss": 0.1276, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.6595902246358923, |
|
"grad_norm": 0.450520396232605, |
|
"learning_rate": 2.874230592861887e-05, |
|
"loss": 0.1233, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.6635398666995803, |
|
"grad_norm": 0.4648306369781494, |
|
"learning_rate": 2.8727325982487705e-05, |
|
"loss": 0.1243, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6674895087632683, |
|
"grad_norm": 0.5166367888450623, |
|
"learning_rate": 2.871226130428941e-05, |
|
"loss": 0.1308, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.6714391508269563, |
|
"grad_norm": 0.6115042567253113, |
|
"learning_rate": 2.8697111987010868e-05, |
|
"loss": 0.1339, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.6753887928906442, |
|
"grad_norm": 0.3470801115036011, |
|
"learning_rate": 2.868187812416141e-05, |
|
"loss": 0.1305, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.6793384349543322, |
|
"grad_norm": 0.40242600440979004, |
|
"learning_rate": 2.8666559809772217e-05, |
|
"loss": 0.1325, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.6832880770180202, |
|
"grad_norm": 0.4116344749927521, |
|
"learning_rate": 2.8651157138395744e-05, |
|
"loss": 0.1385, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.6872377190817082, |
|
"grad_norm": 0.39455336332321167, |
|
"learning_rate": 2.863567020510515e-05, |
|
"loss": 0.1291, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.6911873611453963, |
|
"grad_norm": 0.49655675888061523, |
|
"learning_rate": 2.86200991054937e-05, |
|
"loss": 0.1363, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.6951370032090842, |
|
"grad_norm": 0.4002642035484314, |
|
"learning_rate": 2.8604443935674164e-05, |
|
"loss": 0.1421, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.6990866452727722, |
|
"grad_norm": 0.43481770157814026, |
|
"learning_rate": 2.8588704792278248e-05, |
|
"loss": 0.1254, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.7030362873364602, |
|
"grad_norm": 0.49691149592399597, |
|
"learning_rate": 2.8572881772455993e-05, |
|
"loss": 0.1393, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.7069859294001481, |
|
"grad_norm": 0.47778138518333435, |
|
"learning_rate": 2.8556974973875152e-05, |
|
"loss": 0.1387, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.7109355714638361, |
|
"grad_norm": 0.3887634873390198, |
|
"learning_rate": 2.854098449472061e-05, |
|
"loss": 0.1301, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.7148852135275241, |
|
"grad_norm": 0.3825758695602417, |
|
"learning_rate": 2.852491043369377e-05, |
|
"loss": 0.1292, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.7188348555912121, |
|
"grad_norm": 0.44277575612068176, |
|
"learning_rate": 2.8508752890011957e-05, |
|
"loss": 0.1263, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.7227844976549, |
|
"grad_norm": 0.54979008436203, |
|
"learning_rate": 2.849251196340777e-05, |
|
"loss": 0.1487, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.726734139718588, |
|
"grad_norm": 0.5191593170166016, |
|
"learning_rate": 2.847618775412851e-05, |
|
"loss": 0.1355, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.730683781782276, |
|
"grad_norm": 0.42348307371139526, |
|
"learning_rate": 2.8459780362935532e-05, |
|
"loss": 0.1356, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.734633423845964, |
|
"grad_norm": 0.41457122564315796, |
|
"learning_rate": 2.8443289891103634e-05, |
|
"loss": 0.1268, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.738583065909652, |
|
"grad_norm": 0.559184193611145, |
|
"learning_rate": 2.842671644042043e-05, |
|
"loss": 0.1273, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.7425327079733399, |
|
"grad_norm": 0.46100959181785583, |
|
"learning_rate": 2.8410060113185724e-05, |
|
"loss": 0.1357, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.7464823500370279, |
|
"grad_norm": 0.5634859204292297, |
|
"learning_rate": 2.8393321012210877e-05, |
|
"loss": 0.1271, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.7504319921007159, |
|
"grad_norm": 0.4173336327075958, |
|
"learning_rate": 2.8376499240818164e-05, |
|
"loss": 0.1302, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.7543816341644038, |
|
"grad_norm": 0.40243804454803467, |
|
"learning_rate": 2.8359594902840152e-05, |
|
"loss": 0.1333, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.7583312762280918, |
|
"grad_norm": 0.3762458562850952, |
|
"learning_rate": 2.8342608102619052e-05, |
|
"loss": 0.1271, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.7622809182917798, |
|
"grad_norm": 0.43715864419937134, |
|
"learning_rate": 2.832553894500607e-05, |
|
"loss": 0.1484, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.7662305603554678, |
|
"grad_norm": 0.3971126675605774, |
|
"learning_rate": 2.8308387535360763e-05, |
|
"loss": 0.1258, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.7701802024191557, |
|
"grad_norm": 0.40626007318496704, |
|
"learning_rate": 2.829115397955039e-05, |
|
"loss": 0.1336, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.7741298444828437, |
|
"grad_norm": 0.503835141658783, |
|
"learning_rate": 2.827383838394926e-05, |
|
"loss": 0.135, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.7780794865465317, |
|
"grad_norm": 0.5298701524734497, |
|
"learning_rate": 2.8256440855438074e-05, |
|
"loss": 0.1409, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.7820291286102197, |
|
"grad_norm": 0.5498703122138977, |
|
"learning_rate": 2.8238961501403266e-05, |
|
"loss": 0.1453, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.7859787706739076, |
|
"grad_norm": 0.4256785809993744, |
|
"learning_rate": 2.8221400429736332e-05, |
|
"loss": 0.1297, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.7899284127375956, |
|
"grad_norm": 0.38886457681655884, |
|
"learning_rate": 2.820375774883318e-05, |
|
"loss": 0.13, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.7938780548012836, |
|
"grad_norm": 0.5477973222732544, |
|
"learning_rate": 2.8186033567593445e-05, |
|
"loss": 0.1398, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.7978276968649716, |
|
"grad_norm": 0.4944402277469635, |
|
"learning_rate": 2.8168227995419828e-05, |
|
"loss": 0.1259, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.8017773389286595, |
|
"grad_norm": 0.4402163624763489, |
|
"learning_rate": 2.8150341142217407e-05, |
|
"loss": 0.1368, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.8057269809923475, |
|
"grad_norm": 0.4140058755874634, |
|
"learning_rate": 2.8132373118392985e-05, |
|
"loss": 0.1402, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.8096766230560355, |
|
"grad_norm": 0.5238107442855835, |
|
"learning_rate": 2.8114324034854378e-05, |
|
"loss": 0.1336, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.8136262651197236, |
|
"grad_norm": 0.45435237884521484, |
|
"learning_rate": 2.809619400300975e-05, |
|
"loss": 0.1421, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.8175759071834116, |
|
"grad_norm": 0.5276714563369751, |
|
"learning_rate": 2.8077983134766914e-05, |
|
"loss": 0.1234, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.8215255492470995, |
|
"grad_norm": 0.4083622395992279, |
|
"learning_rate": 2.8059691542532657e-05, |
|
"loss": 0.13, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.8254751913107875, |
|
"grad_norm": 0.3944040834903717, |
|
"learning_rate": 2.8041319339212017e-05, |
|
"loss": 0.1229, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.8294248333744755, |
|
"grad_norm": 0.5149401426315308, |
|
"learning_rate": 2.802286663820763e-05, |
|
"loss": 0.1349, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.8333744754381635, |
|
"grad_norm": 0.5086573362350464, |
|
"learning_rate": 2.800433355341898e-05, |
|
"loss": 0.1367, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.8373241175018514, |
|
"grad_norm": 0.47434648871421814, |
|
"learning_rate": 2.7985720199241736e-05, |
|
"loss": 0.1458, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.8412737595655394, |
|
"grad_norm": 0.5867214798927307, |
|
"learning_rate": 2.796702669056703e-05, |
|
"loss": 0.1319, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.8452234016292274, |
|
"grad_norm": 0.4446616768836975, |
|
"learning_rate": 2.794825314278074e-05, |
|
"loss": 0.1266, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.8491730436929154, |
|
"grad_norm": 0.44527551531791687, |
|
"learning_rate": 2.7929399671762794e-05, |
|
"loss": 0.1396, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.8531226857566033, |
|
"grad_norm": 0.4233611524105072, |
|
"learning_rate": 2.791046639388644e-05, |
|
"loss": 0.1265, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.8570723278202913, |
|
"grad_norm": 0.42697539925575256, |
|
"learning_rate": 2.7891453426017552e-05, |
|
"loss": 0.129, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.8610219698839793, |
|
"grad_norm": 0.5311276912689209, |
|
"learning_rate": 2.7872360885513862e-05, |
|
"loss": 0.1351, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.8649716119476673, |
|
"grad_norm": 0.45064228773117065, |
|
"learning_rate": 2.7853188890224292e-05, |
|
"loss": 0.1132, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.8689212540113552, |
|
"grad_norm": 0.39009493589401245, |
|
"learning_rate": 2.7833937558488185e-05, |
|
"loss": 0.1327, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.8728708960750432, |
|
"grad_norm": 0.39206671714782715, |
|
"learning_rate": 2.7814607009134595e-05, |
|
"loss": 0.1209, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.8768205381387312, |
|
"grad_norm": 0.35631102323532104, |
|
"learning_rate": 2.7795197361481545e-05, |
|
"loss": 0.1267, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.8807701802024192, |
|
"grad_norm": 0.4283501207828522, |
|
"learning_rate": 2.7775708735335293e-05, |
|
"loss": 0.135, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.8847198222661071, |
|
"grad_norm": 0.3623165190219879, |
|
"learning_rate": 2.7756141250989593e-05, |
|
"loss": 0.1277, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.8886694643297951, |
|
"grad_norm": 0.42114606499671936, |
|
"learning_rate": 2.773649502922495e-05, |
|
"loss": 0.1378, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.8926191063934831, |
|
"grad_norm": 0.4476473033428192, |
|
"learning_rate": 2.7716770191307887e-05, |
|
"loss": 0.1296, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.896568748457171, |
|
"grad_norm": 0.3927001655101776, |
|
"learning_rate": 2.7696966858990172e-05, |
|
"loss": 0.1348, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.900518390520859, |
|
"grad_norm": 0.4335472881793976, |
|
"learning_rate": 2.7677085154508085e-05, |
|
"loss": 0.1243, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.904468032584547, |
|
"grad_norm": 0.432326078414917, |
|
"learning_rate": 2.7657125200581666e-05, |
|
"loss": 0.1232, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.908417674648235, |
|
"grad_norm": 0.42572349309921265, |
|
"learning_rate": 2.7637087120413937e-05, |
|
"loss": 0.1197, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.912367316711923, |
|
"grad_norm": 0.5097776651382446, |
|
"learning_rate": 2.761697103769017e-05, |
|
"loss": 0.1106, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.9163169587756109, |
|
"grad_norm": 0.4214634895324707, |
|
"learning_rate": 2.7596777076577105e-05, |
|
"loss": 0.1306, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.9202666008392989, |
|
"grad_norm": 0.5993767380714417, |
|
"learning_rate": 2.7576505361722174e-05, |
|
"loss": 0.1308, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.9242162429029869, |
|
"grad_norm": 0.44176799058914185, |
|
"learning_rate": 2.755615601825276e-05, |
|
"loss": 0.1348, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.9281658849666748, |
|
"grad_norm": 0.4011238217353821, |
|
"learning_rate": 2.7535729171775406e-05, |
|
"loss": 0.1357, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.9321155270303628, |
|
"grad_norm": 0.35617443919181824, |
|
"learning_rate": 2.7515224948375038e-05, |
|
"loss": 0.1299, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.9360651690940508, |
|
"grad_norm": 0.3995439112186432, |
|
"learning_rate": 2.7494643474614197e-05, |
|
"loss": 0.1327, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.9400148111577389, |
|
"grad_norm": 0.35780492424964905, |
|
"learning_rate": 2.7473984877532247e-05, |
|
"loss": 0.1407, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.9439644532214269, |
|
"grad_norm": 0.46763497591018677, |
|
"learning_rate": 2.745324928464461e-05, |
|
"loss": 0.1316, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.9479140952851148, |
|
"grad_norm": 0.5247623324394226, |
|
"learning_rate": 2.743243682394195e-05, |
|
"loss": 0.1353, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.9518637373488028, |
|
"grad_norm": 0.5231720805168152, |
|
"learning_rate": 2.7411547623889397e-05, |
|
"loss": 0.127, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.9558133794124908, |
|
"grad_norm": 0.38919833302497864, |
|
"learning_rate": 2.7390581813425776e-05, |
|
"loss": 0.1197, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.9597630214761788, |
|
"grad_norm": 0.4457249045372009, |
|
"learning_rate": 2.736953952196277e-05, |
|
"loss": 0.1333, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.9637126635398667, |
|
"grad_norm": 0.5078391432762146, |
|
"learning_rate": 2.734842087938415e-05, |
|
"loss": 0.1318, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.9676623056035547, |
|
"grad_norm": 0.5152226090431213, |
|
"learning_rate": 2.7327226016044965e-05, |
|
"loss": 0.133, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.9716119476672427, |
|
"grad_norm": 0.4484505355358124, |
|
"learning_rate": 2.7305955062770738e-05, |
|
"loss": 0.1291, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.9755615897309307, |
|
"grad_norm": 0.38752976059913635, |
|
"learning_rate": 2.728460815085665e-05, |
|
"loss": 0.1274, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.9795112317946186, |
|
"grad_norm": 0.433149129152298, |
|
"learning_rate": 2.7263185412066756e-05, |
|
"loss": 0.1205, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.9834608738583066, |
|
"grad_norm": 0.4824409782886505, |
|
"learning_rate": 2.724168697863313e-05, |
|
"loss": 0.1369, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.9874105159219946, |
|
"grad_norm": 0.4385254383087158, |
|
"learning_rate": 2.722011298325509e-05, |
|
"loss": 0.1249, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.9913601579856826, |
|
"grad_norm": 0.44593289494514465, |
|
"learning_rate": 2.719846355909835e-05, |
|
"loss": 0.1336, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.9953098000493705, |
|
"grad_norm": 0.5583507418632507, |
|
"learning_rate": 2.7176738839794218e-05, |
|
"loss": 0.1402, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.9992594421130585, |
|
"grad_norm": 0.47735145688056946, |
|
"learning_rate": 2.7154938959438757e-05, |
|
"loss": 0.1241, |
|
"step": 2530 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 12655, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.5381326285910835e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |