{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.99936,
  "eval_steps": 500,
  "global_step": 1562,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0128,
      "grad_norm": 3.1302534958568193,
      "learning_rate": 1.2738853503184715e-06,
      "loss": 0.6569,
      "step": 10
    },
    {
      "epoch": 0.0256,
      "grad_norm": 3.1468711921047627,
      "learning_rate": 2.547770700636943e-06,
      "loss": 0.5179,
      "step": 20
    },
    {
      "epoch": 0.0384,
      "grad_norm": 0.5540977306160393,
      "learning_rate": 3.821656050955415e-06,
      "loss": 0.2369,
      "step": 30
    },
    {
      "epoch": 0.0512,
      "grad_norm": 0.5002871919834374,
      "learning_rate": 5.095541401273886e-06,
      "loss": 0.1819,
      "step": 40
    },
    {
      "epoch": 0.064,
      "grad_norm": 0.7150745925714665,
      "learning_rate": 6.369426751592357e-06,
      "loss": 0.1759,
      "step": 50
    },
    {
      "epoch": 0.0768,
      "grad_norm": 0.7619036468756623,
      "learning_rate": 7.64331210191083e-06,
      "loss": 0.16,
      "step": 60
    },
    {
      "epoch": 0.0896,
      "grad_norm": 0.44198933481881236,
      "learning_rate": 8.9171974522293e-06,
      "loss": 0.1608,
      "step": 70
    },
    {
      "epoch": 0.1024,
      "grad_norm": 0.37478693328542767,
      "learning_rate": 1.0191082802547772e-05,
      "loss": 0.1617,
      "step": 80
    },
    {
      "epoch": 0.1152,
      "grad_norm": 0.3696718954770576,
      "learning_rate": 1.1464968152866242e-05,
      "loss": 0.1596,
      "step": 90
    },
    {
      "epoch": 0.128,
      "grad_norm": 0.5203159641478062,
      "learning_rate": 1.2738853503184714e-05,
      "loss": 0.1602,
      "step": 100
    },
    {
      "epoch": 0.1408,
      "grad_norm": 0.37773491168605033,
      "learning_rate": 1.4012738853503186e-05,
      "loss": 0.1485,
      "step": 110
    },
    {
      "epoch": 0.1536,
      "grad_norm": 0.24203685828143712,
      "learning_rate": 1.528662420382166e-05,
      "loss": 0.1555,
      "step": 120
    },
    {
      "epoch": 0.1664,
      "grad_norm": 0.3898599912917371,
      "learning_rate": 1.6560509554140128e-05,
      "loss": 0.1571,
      "step": 130
    },
    {
      "epoch": 0.1792,
      "grad_norm": 0.22025246679268023,
      "learning_rate": 1.78343949044586e-05,
      "loss": 0.1559,
      "step": 140
    },
    {
      "epoch": 0.192,
      "grad_norm": 0.5943517884905586,
      "learning_rate": 1.910828025477707e-05,
      "loss": 0.1578,
      "step": 150
    },
    {
      "epoch": 0.2048,
      "grad_norm": 0.4363975905376457,
      "learning_rate": 1.999977501271127e-05,
      "loss": 0.1468,
      "step": 160
    },
    {
      "epoch": 0.2176,
      "grad_norm": 0.4874326999671375,
      "learning_rate": 1.9995775520317924e-05,
      "loss": 0.1519,
      "step": 170
    },
    {
      "epoch": 0.2304,
      "grad_norm": 0.32242234734849035,
      "learning_rate": 1.998677861199748e-05,
      "loss": 0.1537,
      "step": 180
    },
    {
      "epoch": 0.2432,
      "grad_norm": 0.338978065876802,
      "learning_rate": 1.9972788785779404e-05,
      "loss": 0.1535,
      "step": 190
    },
    {
      "epoch": 0.256,
      "grad_norm": 0.5405845227789843,
      "learning_rate": 1.9953813035916444e-05,
      "loss": 0.1552,
      "step": 200
    },
    {
      "epoch": 0.2688,
      "grad_norm": 0.3187325098825458,
      "learning_rate": 1.9929860849387815e-05,
      "loss": 0.1441,
      "step": 210
    },
    {
      "epoch": 0.2816,
      "grad_norm": 0.181656187619335,
      "learning_rate": 1.9900944201156164e-05,
      "loss": 0.1489,
      "step": 220
    },
    {
      "epoch": 0.2944,
      "grad_norm": 0.26409308919505864,
      "learning_rate": 1.986707754818064e-05,
      "loss": 0.1509,
      "step": 230
    },
    {
      "epoch": 0.3072,
      "grad_norm": 0.23639855452929004,
      "learning_rate": 1.982827782218912e-05,
      "loss": 0.1525,
      "step": 240
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.42132507371171557,
      "learning_rate": 1.9784564421213122e-05,
      "loss": 0.1513,
      "step": 250
    },
    {
      "epoch": 0.3328,
      "grad_norm": 0.30911958261458256,
      "learning_rate": 1.9735959199889723e-05,
      "loss": 0.1415,
      "step": 260
    },
    {
      "epoch": 0.3456,
      "grad_norm": 0.286475064941514,
      "learning_rate": 1.968248645853526e-05,
      "loss": 0.1483,
      "step": 270
    },
    {
      "epoch": 0.3584,
      "grad_norm": 0.3223686350575121,
      "learning_rate": 1.9624172930996322e-05,
      "loss": 0.1496,
      "step": 280
    },
    {
      "epoch": 0.3712,
      "grad_norm": 0.33039144824318656,
      "learning_rate": 1.956104777128409e-05,
      "loss": 0.1496,
      "step": 290
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.3268436732392019,
      "learning_rate": 1.9493142538998713e-05,
      "loss": 0.1524,
      "step": 300
    },
    {
      "epoch": 0.3968,
      "grad_norm": 0.3768976535567336,
      "learning_rate": 1.9420491183550983e-05,
      "loss": 0.1397,
      "step": 310
    },
    {
      "epoch": 0.4096,
      "grad_norm": 0.344679435905093,
      "learning_rate": 1.934313002718924e-05,
      "loss": 0.1459,
      "step": 320
    },
    {
      "epoch": 0.4224,
      "grad_norm": 0.1867877479920934,
      "learning_rate": 1.9261097746839974e-05,
      "loss": 0.149,
      "step": 330
    },
    {
      "epoch": 0.4352,
      "grad_norm": 0.22826415693304572,
      "learning_rate": 1.9174435354771167e-05,
      "loss": 0.1515,
      "step": 340
    },
    {
      "epoch": 0.448,
      "grad_norm": 0.2574654153325295,
      "learning_rate": 1.9083186178088103e-05,
      "loss": 0.1501,
      "step": 350
    },
    {
      "epoch": 0.4608,
      "grad_norm": 0.23070296857068737,
      "learning_rate": 1.898739583707187e-05,
      "loss": 0.1383,
      "step": 360
    },
    {
      "epoch": 0.4736,
      "grad_norm": 0.21129982050815863,
      "learning_rate": 1.8887112222371363e-05,
      "loss": 0.1437,
      "step": 370
    },
    {
      "epoch": 0.4864,
      "grad_norm": 0.17026221614799503,
      "learning_rate": 1.8782385471060217e-05,
      "loss": 0.1495,
      "step": 380
    },
    {
      "epoch": 0.4992,
      "grad_norm": 0.2103435712105906,
      "learning_rate": 1.8673267941570646e-05,
      "loss": 0.1495,
      "step": 390
    },
    {
      "epoch": 0.512,
      "grad_norm": 0.2949165035198057,
      "learning_rate": 1.8559814187516692e-05,
      "loss": 0.1482,
      "step": 400
    },
    {
      "epoch": 0.5248,
      "grad_norm": 0.2727792628609804,
      "learning_rate": 1.844208093042e-05,
      "loss": 0.1385,
      "step": 410
    },
    {
      "epoch": 0.5376,
      "grad_norm": 0.19011236871362203,
      "learning_rate": 1.8320127031351723e-05,
      "loss": 0.1447,
      "step": 420
    },
    {
      "epoch": 0.5504,
      "grad_norm": 0.20224724252134554,
      "learning_rate": 1.8194013461504774e-05,
      "loss": 0.1457,
      "step": 430
    },
    {
      "epoch": 0.5632,
      "grad_norm": 0.21758912206027958,
      "learning_rate": 1.806380327171111e-05,
      "loss": 0.146,
      "step": 440
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.394244725563091,
      "learning_rate": 1.792956156091928e-05,
      "loss": 0.1473,
      "step": 450
    },
    {
      "epoch": 0.5888,
      "grad_norm": 0.3292629665585757,
      "learning_rate": 1.7791355443648045e-05,
      "loss": 0.1378,
      "step": 460
    },
    {
      "epoch": 0.6016,
      "grad_norm": 0.19521004688992183,
      "learning_rate": 1.7649254016432247e-05,
      "loss": 0.1456,
      "step": 470
    },
    {
      "epoch": 0.6144,
      "grad_norm": 0.2316602793370239,
      "learning_rate": 1.750332832327786e-05,
      "loss": 0.1451,
      "step": 480
    },
    {
      "epoch": 0.6272,
      "grad_norm": 0.17573541945620594,
      "learning_rate": 1.735365132014329e-05,
      "loss": 0.1486,
      "step": 490
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.3527896812147239,
      "learning_rate": 1.7200297838464864e-05,
      "loss": 0.1488,
      "step": 500
    },
    {
      "epoch": 0.6528,
      "grad_norm": 0.21609855998849087,
      "learning_rate": 1.7043344547744637e-05,
      "loss": 0.138,
      "step": 510
    },
    {
      "epoch": 0.6656,
      "grad_norm": 0.20743287144243716,
      "learning_rate": 1.6882869917219265e-05,
      "loss": 0.1432,
      "step": 520
    },
    {
      "epoch": 0.6784,
      "grad_norm": 0.14306391155148201,
      "learning_rate": 1.6718954176629088e-05,
      "loss": 0.1458,
      "step": 530
    },
    {
      "epoch": 0.6912,
      "grad_norm": 0.13082293792565916,
      "learning_rate": 1.6551679276107047e-05,
      "loss": 0.1468,
      "step": 540
    },
    {
      "epoch": 0.704,
      "grad_norm": 0.36161924084937497,
      "learning_rate": 1.638112884520748e-05,
      "loss": 0.1469,
      "step": 550
    },
    {
      "epoch": 0.7168,
      "grad_norm": 0.28617618689753893,
      "learning_rate": 1.620738815109531e-05,
      "loss": 0.1371,
      "step": 560
    },
    {
      "epoch": 0.7296,
      "grad_norm": 0.13750653322717393,
      "learning_rate": 1.6030544055916462e-05,
      "loss": 0.1423,
      "step": 570
    },
    {
      "epoch": 0.7424,
      "grad_norm": 0.15234113342700173,
      "learning_rate": 1.5850684973370913e-05,
      "loss": 0.1473,
      "step": 580
    },
    {
      "epoch": 0.7552,
      "grad_norm": 0.1674605203651358,
      "learning_rate": 1.5667900824510005e-05,
      "loss": 0.1465,
      "step": 590
    },
    {
      "epoch": 0.768,
      "grad_norm": 0.30120726166690853,
      "learning_rate": 1.5482282992780155e-05,
      "loss": 0.1459,
      "step": 600
    },
    {
      "epoch": 0.7808,
      "grad_norm": 0.2556669288569797,
      "learning_rate": 1.5293924278335475e-05,
      "loss": 0.1372,
      "step": 610
    },
    {
      "epoch": 0.7936,
      "grad_norm": 0.19878411049962907,
      "learning_rate": 1.5102918851642035e-05,
      "loss": 0.1416,
      "step": 620
    },
    {
      "epoch": 0.8064,
      "grad_norm": 0.14869095864210355,
      "learning_rate": 1.4909362206397114e-05,
      "loss": 0.1435,
      "step": 630
    },
    {
      "epoch": 0.8192,
      "grad_norm": 0.21996626003622385,
      "learning_rate": 1.4713351111786824e-05,
      "loss": 0.1462,
      "step": 640
    },
    {
      "epoch": 0.832,
      "grad_norm": 0.29224476727899246,
      "learning_rate": 1.4514983564106117e-05,
      "loss": 0.1449,
      "step": 650
    },
    {
      "epoch": 0.8448,
      "grad_norm": 0.2688310061351837,
      "learning_rate": 1.431435873776524e-05,
      "loss": 0.1374,
      "step": 660
    },
    {
      "epoch": 0.8576,
      "grad_norm": 0.20038216433613684,
      "learning_rate": 1.4111576935707214e-05,
      "loss": 0.1426,
      "step": 670
    },
    {
      "epoch": 0.8704,
      "grad_norm": 0.13687808263034185,
      "learning_rate": 1.3906739539261115e-05,
      "loss": 0.1457,
      "step": 680
    },
    {
      "epoch": 0.8832,
      "grad_norm": 0.19032971439937477,
      "learning_rate": 1.3699948957456176e-05,
      "loss": 0.1455,
      "step": 690
    },
    {
      "epoch": 0.896,
      "grad_norm": 0.3208508208806956,
      "learning_rate": 1.3491308575822111e-05,
      "loss": 0.1435,
      "step": 700
    },
    {
      "epoch": 0.9088,
      "grad_norm": 0.19524278584922394,
      "learning_rate": 1.3280922704701231e-05,
      "loss": 0.1365,
      "step": 710
    },
    {
      "epoch": 0.9216,
      "grad_norm": 0.17798035245935892,
      "learning_rate": 1.3068896527098205e-05,
      "loss": 0.1433,
      "step": 720
    },
    {
      "epoch": 0.9344,
      "grad_norm": 0.17884600087369845,
      "learning_rate": 1.2855336046093513e-05,
      "loss": 0.1447,
      "step": 730
    },
    {
      "epoch": 0.9472,
      "grad_norm": 0.14100954677816485,
      "learning_rate": 1.2640348031846897e-05,
      "loss": 0.1447,
      "step": 740
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.33304512484187876,
      "learning_rate": 1.2424039968217361e-05,
      "loss": 0.145,
      "step": 750
    },
    {
      "epoch": 0.9728,
      "grad_norm": 0.2096407271139896,
      "learning_rate": 1.220651999902627e-05,
      "loss": 0.1355,
      "step": 760
    },
    {
      "epoch": 0.9856,
      "grad_norm": 0.18948626555533854,
      "learning_rate": 1.1987896873990572e-05,
      "loss": 0.1422,
      "step": 770
    },
    {
      "epoch": 0.9984,
      "grad_norm": 0.25576476392968256,
      "learning_rate": 1.176827989435307e-05,
      "loss": 0.1473,
      "step": 780
    },
    {
      "epoch": 0.99968,
      "eval_loss": 0.13926610350608826,
      "eval_runtime": 42.0798,
      "eval_samples_per_second": 23.764,
      "eval_steps_per_second": 1.497,
      "step": 781
    },
    {
      "epoch": 1.0112,
      "grad_norm": 0.15072347755987592,
      "learning_rate": 1.1547778858236938e-05,
      "loss": 0.1361,
      "step": 790
    },
    {
      "epoch": 1.024,
      "grad_norm": 0.24763730684547336,
      "learning_rate": 1.1326504005751875e-05,
      "loss": 0.1426,
      "step": 800
    },
    {
      "epoch": 1.0368,
      "grad_norm": 0.2049805600857785,
      "learning_rate": 1.1104565963879233e-05,
      "loss": 0.1417,
      "step": 810
    },
    {
      "epoch": 1.0496,
      "grad_norm": 0.17505933336916096,
      "learning_rate": 1.0882075691163749e-05,
      "loss": 0.1425,
      "step": 820
    },
    {
      "epoch": 1.0624,
      "grad_norm": 0.2367359754977405,
      "learning_rate": 1.065914442223952e-05,
      "loss": 0.1395,
      "step": 830
    },
    {
      "epoch": 1.0752,
      "grad_norm": 0.2168168491904799,
      "learning_rate": 1.0435883612217928e-05,
      "loss": 0.1368,
      "step": 840
    },
    {
      "epoch": 1.088,
      "grad_norm": 0.16667141395128426,
      "learning_rate": 1.0212404880965348e-05,
      "loss": 0.1397,
      "step": 850
    },
    {
      "epoch": 1.1008,
      "grad_norm": 0.19793016123521015,
      "learning_rate": 9.988819957298487e-06,
      "loss": 0.1447,
      "step": 860
    },
    {
      "epoch": 1.1136,
      "grad_norm": 0.21711053823618134,
      "learning_rate": 9.765240623125238e-06,
      "loss": 0.1418,
      "step": 870
    },
    {
      "epoch": 1.1264,
      "grad_norm": 0.24562717005747997,
      "learning_rate": 9.541778657559026e-06,
      "loss": 0.1371,
      "step": 880
    },
    {
      "epoch": 1.1392,
      "grad_norm": 0.17839427327356683,
      "learning_rate": 9.318545781034499e-06,
      "loss": 0.1368,
      "step": 890
    },
    {
      "epoch": 1.152,
      "grad_norm": 0.18971426870401556,
      "learning_rate": 9.095653599452604e-06,
      "loss": 0.1411,
      "step": 900
    },
    {
      "epoch": 1.1648,
      "grad_norm": 0.2028582361588426,
      "learning_rate": 8.873213548382895e-06,
      "loss": 0.1432,
      "step": 910
    },
    {
      "epoch": 1.1776,
      "grad_norm": 0.1910575100313919,
      "learning_rate": 8.651336837351013e-06,
      "loss": 0.1441,
      "step": 920
    },
    {
      "epoch": 1.1904,
      "grad_norm": 0.21795820010528985,
      "learning_rate": 8.430134394239182e-06,
      "loss": 0.1389,
      "step": 930
    },
    {
      "epoch": 1.2032,
      "grad_norm": 0.1702846327798786,
      "learning_rate": 8.209716809827475e-06,
      "loss": 0.1387,
      "step": 940
    },
    {
      "epoch": 1.216,
      "grad_norm": 0.19574705900720263,
      "learning_rate": 7.990194282503678e-06,
      "loss": 0.1408,
      "step": 950
    },
    {
      "epoch": 1.2288000000000001,
      "grad_norm": 0.15444562246412258,
      "learning_rate": 7.771676563169276e-06,
      "loss": 0.1424,
      "step": 960
    },
    {
      "epoch": 1.2416,
      "grad_norm": 0.27192054451029335,
      "learning_rate": 7.554272900369184e-06,
      "loss": 0.1431,
      "step": 970
    },
    {
      "epoch": 1.2544,
      "grad_norm": 0.28293536797942354,
      "learning_rate": 7.338091985672651e-06,
      "loss": 0.1375,
      "step": 980
    },
    {
      "epoch": 1.2671999999999999,
      "grad_norm": 0.27054236168586154,
      "learning_rate": 7.1232418993325836e-06,
      "loss": 0.1346,
      "step": 990
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.25032137575039304,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.1401,
      "step": 1000
    },
    {
      "epoch": 1.2928,
      "grad_norm": 0.1593137714997539,
      "learning_rate": 6.697963152274294e-06,
      "loss": 0.1415,
      "step": 1010
    },
    {
      "epoch": 1.3056,
      "grad_norm": 0.17468257527642406,
      "learning_rate": 6.487747110855061e-06,
      "loss": 0.1421,
      "step": 1020
    },
    {
      "epoch": 1.3184,
      "grad_norm": 0.198558265817394,
      "learning_rate": 6.279287030090664e-06,
      "loss": 0.1387,
      "step": 1030
    },
    {
      "epoch": 1.3312,
      "grad_norm": 0.11504815520373578,
      "learning_rate": 6.072687130181493e-06,
      "loss": 0.1353,
      "step": 1040
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 0.13858890875298494,
      "learning_rate": 5.868050701325314e-06,
      "loss": 0.1398,
      "step": 1050
    },
    {
      "epoch": 1.3568,
      "grad_norm": 0.13445081822803,
      "learning_rate": 5.665480052077051e-06,
      "loss": 0.1409,
      "step": 1060
    },
    {
      "epoch": 1.3696,
      "grad_norm": 0.16055599811743235,
      "learning_rate": 5.465076458199332e-06,
      "loss": 0.1417,
      "step": 1070
    },
    {
      "epoch": 1.3824,
      "grad_norm": 0.19169771082374196,
      "learning_rate": 5.266940112029406e-06,
      "loss": 0.138,
      "step": 1080
    },
    {
      "epoch": 1.3952,
      "grad_norm": 0.18154527027617387,
      "learning_rate": 5.071170072387672e-06,
      "loss": 0.1373,
      "step": 1090
    },
    {
      "epoch": 1.408,
      "grad_norm": 0.13523001447293553,
      "learning_rate": 4.877864215052969e-06,
      "loss": 0.1382,
      "step": 1100
    },
    {
      "epoch": 1.4208,
      "grad_norm": 0.14620192480202174,
      "learning_rate": 4.687119183829305e-06,
      "loss": 0.1403,
      "step": 1110
    },
    {
      "epoch": 1.4336,
      "grad_norm": 0.14563804992816887,
      "learning_rate": 4.4990303422285005e-06,
      "loss": 0.1401,
      "step": 1120
    },
    {
      "epoch": 1.4464000000000001,
      "grad_norm": 0.18048671111150355,
      "learning_rate": 4.31369172579296e-06,
      "loss": 0.1351,
      "step": 1130
    },
    {
      "epoch": 1.4592,
      "grad_norm": 0.1274956008805008,
      "learning_rate": 4.1311959950823125e-06,
      "loss": 0.1357,
      "step": 1140
    },
    {
      "epoch": 1.472,
      "grad_norm": 0.15186812567018518,
      "learning_rate": 3.951634389347522e-06,
      "loss": 0.1405,
      "step": 1150
    },
    {
      "epoch": 1.4848,
      "grad_norm": 0.1743781587386354,
      "learning_rate": 3.7750966809155633e-06,
      "loss": 0.1411,
      "step": 1160
    },
    {
      "epoch": 1.4976,
      "grad_norm": 0.16145108253749546,
      "learning_rate": 3.6016711303075003e-06,
      "loss": 0.1418,
      "step": 1170
    },
    {
      "epoch": 1.5104,
      "grad_norm": 0.2443925847158833,
      "learning_rate": 3.431444442112395e-06,
      "loss": 0.1365,
      "step": 1180
    },
    {
      "epoch": 1.5232,
      "grad_norm": 0.16076378293495353,
      "learning_rate": 3.264501721639086e-06,
      "loss": 0.1354,
      "step": 1190
    },
    {
      "epoch": 1.536,
      "grad_norm": 0.20722028172668885,
      "learning_rate": 3.1009264323675737e-06,
      "loss": 0.1387,
      "step": 1200
    },
    {
      "epoch": 1.5488,
      "grad_norm": 0.2367805090756499,
      "learning_rate": 2.940800354221205e-06,
      "loss": 0.1414,
      "step": 1210
    },
    {
      "epoch": 1.5615999999999999,
      "grad_norm": 0.2045021555576582,
      "learning_rate": 2.7842035426805858e-06,
      "loss": 0.1435,
      "step": 1220
    },
    {
      "epoch": 1.5744,
      "grad_norm": 0.3378391898032357,
      "learning_rate": 2.6312142887596116e-06,
      "loss": 0.137,
      "step": 1230
    },
    {
      "epoch": 1.5872000000000002,
      "grad_norm": 0.15144211781786535,
      "learning_rate": 2.4819090798636504e-06,
      "loss": 0.1362,
      "step": 1240
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.19705372401646207,
      "learning_rate": 2.336362561549459e-06,
      "loss": 0.1398,
      "step": 1250
    },
    {
      "epoch": 1.6128,
      "grad_norm": 0.1981712638313628,
      "learning_rate": 2.194647500205925e-06,
      "loss": 0.1412,
      "step": 1260
    },
    {
      "epoch": 1.6256,
      "grad_norm": 0.22583463728566275,
      "learning_rate": 2.056834746674313e-06,
      "loss": 0.1402,
      "step": 1270
    },
    {
      "epoch": 1.6383999999999999,
      "grad_norm": 0.3166766019266127,
      "learning_rate": 1.9229932008261756e-06,
      "loss": 0.1366,
      "step": 1280
    },
    {
      "epoch": 1.6512,
      "grad_norm": 0.13358881231561032,
      "learning_rate": 1.7931897771166861e-06,
      "loss": 0.1346,
      "step": 1290
    },
    {
      "epoch": 1.6640000000000001,
      "grad_norm": 0.28514268179697894,
      "learning_rate": 1.6674893711305528e-06,
      "loss": 0.1399,
      "step": 1300
    },
    {
      "epoch": 1.6768,
      "grad_norm": 0.08846971622020448,
      "learning_rate": 1.5459548271373005e-06,
      "loss": 0.1383,
      "step": 1310
    },
    {
      "epoch": 1.6896,
      "grad_norm": 0.28809034512805015,
      "learning_rate": 1.4286469066721055e-06,
      "loss": 0.1399,
      "step": 1320
    },
    {
      "epoch": 1.7024,
      "grad_norm": 0.1598977533627517,
      "learning_rate": 1.3156242581578815e-06,
      "loss": 0.1351,
      "step": 1330
    },
    {
      "epoch": 1.7151999999999998,
      "grad_norm": 0.19800584875933322,
      "learning_rate": 1.2069433875838498e-06,
      "loss": 0.1347,
      "step": 1340
    },
    {
      "epoch": 1.728,
      "grad_norm": 0.12495858993727499,
      "learning_rate": 1.10265863025521e-06,
      "loss": 0.1376,
      "step": 1350
    },
    {
      "epoch": 1.7408000000000001,
      "grad_norm": 0.12691325643260487,
      "learning_rate": 1.0028221236280445e-06,
      "loss": 0.1408,
      "step": 1360
    },
    {
      "epoch": 1.7536,
      "grad_norm": 0.14092289840202932,
      "learning_rate": 9.074837812430626e-07,
      "loss": 0.1404,
      "step": 1370
    },
    {
      "epoch": 1.7664,
      "grad_norm": 0.1356179011766789,
      "learning_rate": 8.166912677711769e-07,
      "loss": 0.1344,
      "step": 1380
    },
    {
      "epoch": 1.7792,
      "grad_norm": 0.11072101415903435,
      "learning_rate": 7.304899751834194e-07,
      "loss": 0.1363,
      "step": 1390
    },
    {
      "epoch": 1.792,
      "grad_norm": 0.09364893615869158,
      "learning_rate": 6.489230000571067e-07,
      "loss": 0.1387,
      "step": 1400
    },
    {
      "epoch": 1.8048,
      "grad_norm": 0.09719389776054362,
      "learning_rate": 5.720311220295727e-07,
      "loss": 0.1406,
      "step": 1410
    },
    {
      "epoch": 1.8176,
      "grad_norm": 0.14149128093688274,
      "learning_rate": 4.998527834102873e-07,
      "loss": 0.1411,
      "step": 1420
    },
    {
      "epoch": 1.8304,
      "grad_norm": 0.15889816186833727,
      "learning_rate": 4.3242406996150674e-07,
      "loss": 0.1345,
      "step": 1430
    },
    {
      "epoch": 1.8432,
      "grad_norm": 0.0958974593538415,
      "learning_rate": 3.697786928571034e-07,
      "loss": 0.1341,
      "step": 1440
    },
    {
      "epoch": 1.8559999999999999,
      "grad_norm": 0.07979518065313038,
      "learning_rate": 3.119479718285601e-07,
      "loss": 0.1381,
      "step": 1450
    },
    {
      "epoch": 1.8688,
      "grad_norm": 0.10204011708199633,
      "learning_rate": 2.5896081950659737e-07,
      "loss": 0.1408,
      "step": 1460
    },
    {
      "epoch": 1.8816000000000002,
      "grad_norm": 0.11627971006018599,
      "learning_rate": 2.108437269662089e-07,
      "loss": 0.1433,
      "step": 1470
    },
    {
      "epoch": 1.8944,
      "grad_norm": 0.15458392923737632,
      "learning_rate": 1.6762075048238347e-07,
      "loss": 0.1359,
      "step": 1480
    },
    {
      "epoch": 1.9072,
      "grad_norm": 0.10450160701417593,
      "learning_rate": 1.2931349950309535e-07,
      "loss": 0.1342,
      "step": 1490
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.09770183301391486,
      "learning_rate": 9.594112584561e-08,
      "loss": 0.1381,
      "step": 1500
    },
    {
      "epoch": 1.9327999999999999,
      "grad_norm": 0.09381477173800809,
      "learning_rate": 6.752031412147086e-08,
      "loss": 0.1394,
      "step": 1510
    },
    {
      "epoch": 1.9456,
      "grad_norm": 0.12023221514437896,
      "learning_rate": 4.406527339498423e-08,
      "loss": 0.1416,
      "step": 1520
    },
    {
      "epoch": 1.9584000000000001,
      "grad_norm": 0.16776774231570918,
      "learning_rate": 2.5587730079362772e-08,
      "loss": 0.134,
      "step": 1530
    },
    {
      "epoch": 1.9712,
      "grad_norm": 0.09797271568610905,
      "learning_rate": 1.2096922074060768e-08,
      "loss": 0.134,
      "step": 1540
    },
    {
      "epoch": 1.984,
      "grad_norm": 0.09345305263894858,
      "learning_rate": 3.5995941462718677e-09,
      "loss": 0.138,
      "step": 1550
    },
    {
      "epoch": 1.9968,
      "grad_norm": 0.08633576245142834,
      "learning_rate": 9.999455885978393e-11,
      "loss": 0.1406,
      "step": 1560
    },
    {
      "epoch": 1.99936,
      "eval_loss": 0.13485482335090637,
      "eval_runtime": 42.0832,
      "eval_samples_per_second": 23.762,
      "eval_steps_per_second": 1.497,
      "step": 1562
    },
    {
      "epoch": 1.99936,
      "step": 1562,
      "total_flos": 534097819484160.0,
      "train_loss": 0.14969347108265196,
      "train_runtime": 45991.4783,
      "train_samples_per_second": 4.349,
      "train_steps_per_second": 0.034
    }
  ],
  "logging_steps": 10,
  "max_steps": 1562,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 781,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 534097819484160.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}