{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 819,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001221001221001221,
      "grad_norm": 1.0205980343161538,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 1.6063,
      "step": 1
    },
    {
      "epoch": 0.006105006105006105,
      "grad_norm": 0.89369227471272,
      "learning_rate": 6.097560975609756e-05,
      "loss": 1.6127,
      "step": 5
    },
    {
      "epoch": 0.01221001221001221,
      "grad_norm": 0.773993509246519,
      "learning_rate": 0.00012195121951219512,
      "loss": 1.5596,
      "step": 10
    },
    {
      "epoch": 0.018315018315018316,
      "grad_norm": 0.6209997076659476,
      "learning_rate": 0.00018292682926829268,
      "loss": 1.4619,
      "step": 15
    },
    {
      "epoch": 0.02442002442002442,
      "grad_norm": 0.4770932033448127,
      "learning_rate": 0.00024390243902439024,
      "loss": 1.4028,
      "step": 20
    },
    {
      "epoch": 0.030525030525030524,
      "grad_norm": 0.3320501554134054,
      "learning_rate": 0.0003048780487804878,
      "loss": 1.347,
      "step": 25
    },
    {
      "epoch": 0.03663003663003663,
      "grad_norm": 0.2002596607926003,
      "learning_rate": 0.00036585365853658537,
      "loss": 1.3103,
      "step": 30
    },
    {
      "epoch": 0.042735042735042736,
      "grad_norm": 0.20128176628182073,
      "learning_rate": 0.0004268292682926829,
      "loss": 1.3026,
      "step": 35
    },
    {
      "epoch": 0.04884004884004884,
      "grad_norm": 0.17594648344906566,
      "learning_rate": 0.0004878048780487805,
      "loss": 1.2854,
      "step": 40
    },
    {
      "epoch": 0.054945054945054944,
      "grad_norm": 0.17236683696702648,
      "learning_rate": 0.0005487804878048781,
      "loss": 1.271,
      "step": 45
    },
    {
      "epoch": 0.06105006105006105,
      "grad_norm": 0.17916116023910555,
      "learning_rate": 0.0006097560975609756,
      "loss": 1.2661,
      "step": 50
    },
    {
      "epoch": 0.06715506715506715,
      "grad_norm": 0.15617178601932155,
      "learning_rate": 0.0006707317073170732,
      "loss": 1.2528,
      "step": 55
    },
    {
      "epoch": 0.07326007326007326,
      "grad_norm": 0.1927465066785224,
      "learning_rate": 0.0007317073170731707,
      "loss": 1.252,
      "step": 60
    },
    {
      "epoch": 0.07936507936507936,
      "grad_norm": 0.20776151355108807,
      "learning_rate": 0.0007926829268292683,
      "loss": 1.2463,
      "step": 65
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 0.1655472216601126,
      "learning_rate": 0.0008536585365853659,
      "loss": 1.239,
      "step": 70
    },
    {
      "epoch": 0.09157509157509157,
      "grad_norm": 0.20874533177899088,
      "learning_rate": 0.0009146341463414635,
      "loss": 1.2417,
      "step": 75
    },
    {
      "epoch": 0.09768009768009768,
      "grad_norm": 0.21100479425784993,
      "learning_rate": 0.000975609756097561,
      "loss": 1.2328,
      "step": 80
    },
    {
      "epoch": 0.10378510378510379,
      "grad_norm": 0.20904541266897642,
      "learning_rate": 0.000999959117130623,
      "loss": 1.2336,
      "step": 85
    },
    {
      "epoch": 0.10989010989010989,
      "grad_norm": 0.20019701905382167,
      "learning_rate": 0.000999709301584265,
      "loss": 1.2252,
      "step": 90
    },
    {
      "epoch": 0.115995115995116,
      "grad_norm": 0.21413001652476213,
      "learning_rate": 0.0009992324965361792,
      "loss": 1.2243,
      "step": 95
    },
    {
      "epoch": 0.1221001221001221,
      "grad_norm": 0.16522854367968515,
      "learning_rate": 0.0009985289185717684,
      "loss": 1.2121,
      "step": 100
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 0.25042890936419604,
      "learning_rate": 0.000997598887286467,
      "loss": 1.2153,
      "step": 105
    },
    {
      "epoch": 0.1343101343101343,
      "grad_norm": 0.2092348207313529,
      "learning_rate": 0.000996442825140569,
      "loss": 1.2067,
      "step": 110
    },
    {
      "epoch": 0.14041514041514042,
      "grad_norm": 0.1987412303455424,
      "learning_rate": 0.0009950612572673255,
      "loss": 1.211,
      "step": 115
    },
    {
      "epoch": 0.14652014652014653,
      "grad_norm": 0.19695075669196946,
      "learning_rate": 0.0009934548112344088,
      "loss": 1.2139,
      "step": 120
    },
    {
      "epoch": 0.15262515262515264,
      "grad_norm": 0.1432709740921402,
      "learning_rate": 0.0009916242167588433,
      "loss": 1.2147,
      "step": 125
    },
    {
      "epoch": 0.15873015873015872,
      "grad_norm": 0.16002380073043662,
      "learning_rate": 0.0009895703053755364,
      "loss": 1.2028,
      "step": 130
    },
    {
      "epoch": 0.16483516483516483,
      "grad_norm": 0.15340967683760462,
      "learning_rate": 0.0009872940100595598,
      "loss": 1.1995,
      "step": 135
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 0.18565794761972862,
      "learning_rate": 0.0009847963648023522,
      "loss": 1.1992,
      "step": 140
    },
    {
      "epoch": 0.17704517704517705,
      "grad_norm": 0.20113198830929513,
      "learning_rate": 0.000982078504142035,
      "loss": 1.1846,
      "step": 145
    },
    {
      "epoch": 0.18315018315018314,
      "grad_norm": 0.239052195300538,
      "learning_rate": 0.000979141662648057,
      "loss": 1.1917,
      "step": 150
    },
    {
      "epoch": 0.18925518925518925,
      "grad_norm": 0.18541851296547388,
      "learning_rate": 0.0009759871743604004,
      "loss": 1.1883,
      "step": 155
    },
    {
      "epoch": 0.19536019536019536,
      "grad_norm": 0.1976795721725536,
      "learning_rate": 0.0009726164721835996,
      "loss": 1.1844,
      "step": 160
    },
    {
      "epoch": 0.20146520146520147,
      "grad_norm": 0.17463002143028833,
      "learning_rate": 0.0009690310872358572,
      "loss": 1.1969,
      "step": 165
    },
    {
      "epoch": 0.20757020757020758,
      "grad_norm": 0.21323309987103073,
      "learning_rate": 0.0009652326481535434,
      "loss": 1.188,
      "step": 170
    },
    {
      "epoch": 0.21367521367521367,
      "grad_norm": 0.23172909898163094,
      "learning_rate": 0.0009612228803513976,
      "loss": 1.186,
      "step": 175
    },
    {
      "epoch": 0.21978021978021978,
      "grad_norm": 0.22579230538853118,
      "learning_rate": 0.0009570036052387725,
      "loss": 1.1798,
      "step": 180
    },
    {
      "epoch": 0.2258852258852259,
      "grad_norm": 0.1619341093580773,
      "learning_rate": 0.0009525767393922706,
      "loss": 1.1662,
      "step": 185
    },
    {
      "epoch": 0.231990231990232,
      "grad_norm": 0.183207674619275,
      "learning_rate": 0.0009479442936851526,
      "loss": 1.1723,
      "step": 190
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 0.21188703922152927,
      "learning_rate": 0.0009431083723739124,
      "loss": 1.1576,
      "step": 195
    },
    {
      "epoch": 0.2442002442002442,
      "grad_norm": 0.2089350434131967,
      "learning_rate": 0.0009380711721424326,
      "loss": 1.1619,
      "step": 200
    },
    {
      "epoch": 0.2503052503052503,
      "grad_norm": 0.15157070914539794,
      "learning_rate": 0.0009328349811041565,
      "loss": 1.1631,
      "step": 205
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.16351230424545868,
      "learning_rate": 0.0009274021777627277,
      "loss": 1.1709,
      "step": 210
    },
    {
      "epoch": 0.2625152625152625,
      "grad_norm": 0.17564187179508275,
      "learning_rate": 0.0009217752299315725,
      "loss": 1.1462,
      "step": 215
    },
    {
      "epoch": 0.2686202686202686,
      "grad_norm": 0.2131938226227643,
      "learning_rate": 0.0009159566936129111,
      "loss": 1.1723,
      "step": 220
    },
    {
      "epoch": 0.27472527472527475,
      "grad_norm": 0.20887852013049316,
      "learning_rate": 0.0009099492118367123,
      "loss": 1.1689,
      "step": 225
    },
    {
      "epoch": 0.28083028083028083,
      "grad_norm": 0.17857721915396702,
      "learning_rate": 0.0009037555134601149,
      "loss": 1.1466,
      "step": 230
    },
    {
      "epoch": 0.2869352869352869,
      "grad_norm": 0.1620613655817899,
      "learning_rate": 0.000897378411927864,
      "loss": 1.1592,
      "step": 235
    },
    {
      "epoch": 0.29304029304029305,
      "grad_norm": 0.2125809185558365,
      "learning_rate": 0.0008908208039943213,
      "loss": 1.1485,
      "step": 240
    },
    {
      "epoch": 0.29914529914529914,
      "grad_norm": 0.1748347227235315,
      "learning_rate": 0.0008840856684076366,
      "loss": 1.1332,
      "step": 245
    },
    {
      "epoch": 0.3052503052503053,
      "grad_norm": 0.22239189840818827,
      "learning_rate": 0.0008771760645566706,
      "loss": 1.141,
      "step": 250
    },
    {
      "epoch": 0.31135531135531136,
      "grad_norm": 3.2813461087169853,
      "learning_rate": 0.000870095131081289,
      "loss": 1.1641,
      "step": 255
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 0.2268012798675002,
      "learning_rate": 0.0008628460844466573,
      "loss": 1.1401,
      "step": 260
    },
    {
      "epoch": 0.3235653235653236,
      "grad_norm": 0.19745728202118107,
      "learning_rate": 0.0008554322174821833,
      "loss": 1.1401,
      "step": 265
    },
    {
      "epoch": 0.32967032967032966,
      "grad_norm": 0.1818589584492454,
      "learning_rate": 0.0008478568978857722,
      "loss": 1.1412,
      "step": 270
    },
    {
      "epoch": 0.33577533577533575,
      "grad_norm": 0.17803807739093683,
      "learning_rate": 0.0008401235666940728,
      "loss": 1.1427,
      "step": 275
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 0.1635966967881225,
      "learning_rate": 0.0008322357367194109,
      "loss": 1.1348,
      "step": 280
    },
    {
      "epoch": 0.34798534798534797,
      "grad_norm": 0.16634307483679497,
      "learning_rate": 0.0008241969909541184,
      "loss": 1.1291,
      "step": 285
    },
    {
      "epoch": 0.3540903540903541,
      "grad_norm": 0.22380463687576627,
      "learning_rate": 0.0008160109809429835,
      "loss": 1.1375,
      "step": 290
    },
    {
      "epoch": 0.3601953601953602,
      "grad_norm": 0.17014516986944303,
      "learning_rate": 0.0008076814251245613,
      "loss": 1.1315,
      "step": 295
    },
    {
      "epoch": 0.3663003663003663,
      "grad_norm": 0.19479534952981664,
      "learning_rate": 0.0007992121071421001,
      "loss": 1.1478,
      "step": 300
    },
    {
      "epoch": 0.3724053724053724,
      "grad_norm": 0.1880458408856176,
      "learning_rate": 0.0007906068741248461,
      "loss": 1.1281,
      "step": 305
    },
    {
      "epoch": 0.3785103785103785,
      "grad_norm": 0.15460815741303807,
      "learning_rate": 0.0007818696349405123,
      "loss": 1.1362,
      "step": 310
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.15045386348296025,
      "learning_rate": 0.0007730043584197021,
      "loss": 1.1259,
      "step": 315
    },
    {
      "epoch": 0.3907203907203907,
      "grad_norm": 0.20122942405887176,
      "learning_rate": 0.0007640150715530953,
      "loss": 1.1181,
      "step": 320
    },
    {
      "epoch": 0.3968253968253968,
      "grad_norm": 0.20533248267550017,
      "learning_rate": 0.0007549058576622157,
      "loss": 1.1293,
      "step": 325
    },
    {
      "epoch": 0.40293040293040294,
      "grad_norm": 0.13677076752035072,
      "learning_rate": 0.0007456808545446102,
      "loss": 1.1221,
      "step": 330
    },
    {
      "epoch": 0.409035409035409,
      "grad_norm": 0.1734062852611153,
      "learning_rate": 0.0007363442525942826,
      "loss": 1.118,
      "step": 335
    },
    {
      "epoch": 0.41514041514041516,
      "grad_norm": 0.16356872758418958,
      "learning_rate": 0.0007269002928982366,
      "loss": 1.1231,
      "step": 340
    },
    {
      "epoch": 0.42124542124542125,
      "grad_norm": 0.17309361004903426,
      "learning_rate": 0.0007173532653099911,
      "loss": 1.1146,
      "step": 345
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 0.1326003328898491,
      "learning_rate": 0.0007077075065009433,
      "loss": 1.136,
      "step": 350
    },
    {
      "epoch": 0.43345543345543347,
      "grad_norm": 0.1811730240238145,
      "learning_rate": 0.0006979673979904665,
      "loss": 1.1276,
      "step": 355
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.13629851189524775,
      "learning_rate": 0.0006881373641556346,
      "loss": 1.1113,
      "step": 360
    },
    {
      "epoch": 0.4456654456654457,
      "grad_norm": 0.21374115697770382,
      "learning_rate": 0.0006782218702214797,
      "loss": 1.1078,
      "step": 365
    },
    {
      "epoch": 0.4517704517704518,
      "grad_norm": 0.16886558971737456,
      "learning_rate": 0.000668225420232694,
      "loss": 1.1108,
      "step": 370
    },
    {
      "epoch": 0.45787545787545786,
      "grad_norm": 0.16694922624121042,
      "learning_rate": 0.0006581525550076989,
      "loss": 1.1068,
      "step": 375
    },
    {
      "epoch": 0.463980463980464,
      "grad_norm": 0.16014858561217288,
      "learning_rate": 0.0006480078500760096,
      "loss": 1.1185,
      "step": 380
    },
    {
      "epoch": 0.4700854700854701,
      "grad_norm": 0.18799411654589882,
      "learning_rate": 0.0006377959135998322,
      "loss": 1.1143,
      "step": 385
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 0.1270424719263773,
      "learning_rate": 0.0006275213842808383,
      "loss": 1.1055,
      "step": 390
    },
    {
      "epoch": 0.4822954822954823,
      "grad_norm": 0.17394420991284418,
      "learning_rate": 0.0006171889292530655,
      "loss": 1.105,
      "step": 395
    },
    {
      "epoch": 0.4884004884004884,
      "grad_norm": 0.15074553592858597,
      "learning_rate": 0.0006068032419629059,
      "loss": 1.1167,
      "step": 400
    },
    {
      "epoch": 0.4945054945054945,
      "grad_norm": 0.1575327685510419,
      "learning_rate": 0.0005963690400371386,
      "loss": 1.105,
      "step": 405
    },
    {
      "epoch": 0.5006105006105006,
      "grad_norm": 0.15495173159548875,
      "learning_rate": 0.0005858910631399817,
      "loss": 1.1079,
      "step": 410
    },
    {
      "epoch": 0.5067155067155067,
      "grad_norm": 0.12495502826181124,
      "learning_rate": 0.0005753740708201315,
      "loss": 1.0961,
      "step": 415
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.17798304801391704,
      "learning_rate": 0.0005648228403487712,
      "loss": 1.103,
      "step": 420
    },
    {
      "epoch": 0.518925518925519,
      "grad_norm": 0.15145475180954737,
      "learning_rate": 0.0005542421645495279,
      "loss": 1.1082,
      "step": 425
    },
    {
      "epoch": 0.525030525030525,
      "grad_norm": 0.1674135601603621,
      "learning_rate": 0.0005436368496213656,
      "loss": 1.0922,
      "step": 430
    },
    {
      "epoch": 0.5311355311355311,
      "grad_norm": 0.1322258266693411,
      "learning_rate": 0.0005330117129554028,
      "loss": 1.0969,
      "step": 435
    },
    {
      "epoch": 0.5372405372405372,
      "grad_norm": 0.16392404937146968,
      "learning_rate": 0.0005223715809466454,
      "loss": 1.1023,
      "step": 440
    },
    {
      "epoch": 0.5433455433455433,
      "grad_norm": 0.15868256924573232,
      "learning_rate": 0.0005117212868016303,
      "loss": 1.0961,
      "step": 445
    },
    {
      "epoch": 0.5494505494505495,
      "grad_norm": 0.1493324553731281,
      "learning_rate": 0.0005010656683429746,
      "loss": 1.0992,
      "step": 450
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.13303968473914282,
      "learning_rate": 0.0004904095658118283,
      "loss": 1.0869,
      "step": 455
    },
    {
      "epoch": 0.5616605616605617,
      "grad_norm": 0.13445179172013122,
      "learning_rate": 0.0004797578196692281,
      "loss": 1.0979,
      "step": 460
    },
    {
      "epoch": 0.5677655677655677,
      "grad_norm": 0.1353400408710943,
      "learning_rate": 0.00046911526839735093,
      "loss": 1.1058,
      "step": 465
    },
    {
      "epoch": 0.5738705738705738,
      "grad_norm": 0.15268434606287648,
      "learning_rate": 0.0004584867463016671,
      "loss": 1.0955,
      "step": 470
    },
    {
      "epoch": 0.57997557997558,
      "grad_norm": 0.14945079736642003,
      "learning_rate": 0.00044787708131499104,
      "loss": 1.0829,
      "step": 475
    },
    {
      "epoch": 0.5860805860805861,
      "grad_norm": 0.1308874678872484,
      "learning_rate": 0.0004372910928044249,
      "loss": 1.0899,
      "step": 480
    },
    {
      "epoch": 0.5921855921855922,
      "grad_norm": 0.1315948608079688,
      "learning_rate": 0.00042673358938219544,
      "loss": 1.0864,
      "step": 485
    },
    {
      "epoch": 0.5982905982905983,
      "grad_norm": 0.19438574034423878,
      "learning_rate": 0.00041620936672137393,
      "loss": 1.093,
      "step": 490
    },
    {
      "epoch": 0.6043956043956044,
      "grad_norm": 0.12572375675032343,
      "learning_rate": 0.00040572320537747656,
      "loss": 1.0875,
      "step": 495
    },
    {
      "epoch": 0.6105006105006106,
      "grad_norm": 0.12787583668085029,
      "learning_rate": 0.0003952798686169279,
      "loss": 1.1024,
      "step": 500
    },
    {
      "epoch": 0.6166056166056166,
      "grad_norm": 0.14209081780825783,
      "learning_rate": 0.00038488410025338133,
      "loss": 1.0938,
      "step": 505
    },
    {
      "epoch": 0.6227106227106227,
      "grad_norm": 0.13751458022764315,
      "learning_rate": 0.00037454062249287477,
      "loss": 1.0878,
      "step": 510
    },
    {
      "epoch": 0.6288156288156288,
      "grad_norm": 0.13954841564889886,
      "learning_rate": 0.0003642541337887999,
      "loss": 1.0912,
      "step": 515
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 0.14997321061776903,
      "learning_rate": 0.00035402930670766296,
      "loss": 1.0747,
      "step": 520
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 0.1701293287916612,
      "learning_rate": 0.00034387078580660346,
      "loss": 1.0783,
      "step": 525
    },
    {
      "epoch": 0.6471306471306472,
      "grad_norm": 0.12199767794093884,
      "learning_rate": 0.00033378318552363664,
      "loss": 1.0776,
      "step": 530
    },
    {
      "epoch": 0.6532356532356532,
      "grad_norm": 0.16919824019980462,
      "learning_rate": 0.0003237710880815756,
      "loss": 1.0889,
      "step": 535
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 0.13811611845852387,
      "learning_rate": 0.00031383904140658986,
      "loss": 1.0762,
      "step": 540
    },
    {
      "epoch": 0.6654456654456654,
      "grad_norm": 0.15905169364412688,
      "learning_rate": 0.0003039915570623396,
      "loss": 1.082,
      "step": 545
    },
    {
      "epoch": 0.6715506715506715,
      "grad_norm": 0.1296226609467321,
      "learning_rate": 0.0002942331082006308,
      "loss": 1.0724,
      "step": 550
    },
    {
      "epoch": 0.6776556776556777,
      "grad_norm": 0.1330698219795046,
      "learning_rate": 0.00028456812752951485,
      "loss": 1.0723,
      "step": 555
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 0.13173063625642664,
      "learning_rate": 0.0002750010052997635,
      "loss": 1.0783,
      "step": 560
    },
    {
      "epoch": 0.6898656898656899,
      "grad_norm": 0.11827576804873147,
      "learning_rate": 0.00026553608731062604,
      "loss": 1.0734,
      "step": 565
    },
    {
      "epoch": 0.6959706959706959,
      "grad_norm": 0.12044643950959122,
      "learning_rate": 0.00025617767293578176,
      "loss": 1.0695,
      "step": 570
    },
    {
      "epoch": 0.702075702075702,
      "grad_norm": 0.12921730309691146,
      "learning_rate": 0.0002469300131703773,
      "loss": 1.0721,
      "step": 575
    },
    {
      "epoch": 0.7081807081807082,
      "grad_norm": 0.12533218466426468,
      "learning_rate": 0.00023779730870004235,
      "loss": 1.0608,
      "step": 580
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.12817655243582374,
      "learning_rate": 0.00022878370799275777,
      "loss": 1.0789,
      "step": 585
    },
    {
      "epoch": 0.7203907203907204,
      "grad_norm": 0.1293778348063508,
      "learning_rate": 0.0002198933054144414,
      "loss": 1.0602,
      "step": 590
    },
    {
      "epoch": 0.7264957264957265,
      "grad_norm": 0.1263148140338118,
      "learning_rate": 0.00021113013936911113,
      "loss": 1.0679,
      "step": 595
    },
    {
      "epoch": 0.7326007326007326,
      "grad_norm": 0.13594429624621016,
      "learning_rate": 0.00020249819046446837,
      "loss": 1.0683,
      "step": 600
    },
    {
      "epoch": 0.7387057387057387,
      "grad_norm": 0.11897323140031583,
      "learning_rate": 0.00019400137970373356,
      "loss": 1.0715,
      "step": 605
    },
    {
      "epoch": 0.7448107448107448,
      "grad_norm": 0.12134111257996806,
      "learning_rate": 0.00018564356670455767,
      "loss": 1.0737,
      "step": 610
    },
    {
      "epoch": 0.7509157509157509,
      "grad_norm": 0.12412837845655514,
      "learning_rate": 0.00017742854794581785,
      "loss": 1.077,
      "step": 615
    },
    {
      "epoch": 0.757020757020757,
      "grad_norm": 0.10995117277188224,
      "learning_rate": 0.00016936005504309342,
      "loss": 1.0725,
      "step": 620
    },
    {
      "epoch": 0.7631257631257631,
      "grad_norm": 0.12324463281674919,
      "learning_rate": 0.0001614417530536042,
      "loss": 1.0714,
      "step": 625
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.12422719561341983,
      "learning_rate": 0.00015367723881138434,
      "loss": 1.0764,
      "step": 630
    },
    {
      "epoch": 0.7753357753357754,
      "grad_norm": 0.15097612554081813,
      "learning_rate": 0.00014607003929344492,
      "loss": 1.0735,
      "step": 635
    },
    {
      "epoch": 0.7814407814407814,
      "grad_norm": 0.10792336331652043,
      "learning_rate": 0.00013862361001766972,
      "loss": 1.0656,
      "step": 640
    },
    {
      "epoch": 0.7875457875457875,
      "grad_norm": 0.10878726224284914,
      "learning_rate": 0.00013134133347316885,
      "loss": 1.054,
      "step": 645
    },
    {
      "epoch": 0.7936507936507936,
      "grad_norm": 0.10538532292466002,
      "learning_rate": 0.0001242265175838072,
      "loss": 1.0565,
      "step": 650
    },
    {
      "epoch": 0.7997557997557998,
      "grad_norm": 0.132655865253661,
      "learning_rate": 0.00011728239420560316,
      "loss": 1.0623,
      "step": 655
    },
    {
      "epoch": 0.8058608058608059,
      "grad_norm": 0.13898152967373176,
      "learning_rate": 0.0001105121176586793,
      "loss": 1.062,
      "step": 660
    },
    {
      "epoch": 0.811965811965812,
      "grad_norm": 0.1284900261818914,
      "learning_rate": 0.00010391876329443534,
      "loss": 1.0573,
      "step": 665
    },
    {
      "epoch": 0.818070818070818,
      "grad_norm": 0.12601548049011016,
      "learning_rate": 9.750532609858991e-05,
      "loss": 1.0627,
      "step": 670
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 0.10148390380785619,
      "learning_rate": 9.127471933073007e-05,
      "loss": 1.0689,
      "step": 675
    },
    {
      "epoch": 0.8302808302808303,
      "grad_norm": 0.11101937832472626,
      "learning_rate": 8.522977320098224e-05,
      "loss": 1.0592,
      "step": 680
    },
    {
      "epoch": 0.8363858363858364,
      "grad_norm": 0.11000851597542352,
      "learning_rate": 7.937323358440934e-05,
      "loss": 1.0613,
      "step": 685
    },
    {
      "epoch": 0.8424908424908425,
      "grad_norm": 0.13212021122536566,
      "learning_rate": 7.370776077371622e-05,
      "loss": 1.055,
      "step": 690
    },
    {
      "epoch": 0.8485958485958486,
      "grad_norm": 0.12501157813798788,
      "learning_rate": 6.82359282708292e-05,
      "loss": 1.0644,
      "step": 695
    },
    {
      "epoch": 0.8547008547008547,
      "grad_norm": 0.12182549104138736,
      "learning_rate": 6.296022161790149e-05,
      "loss": 1.0686,
      "step": 700
    },
    {
      "epoch": 0.8608058608058609,
      "grad_norm": 0.10898863882704415,
      "learning_rate": 5.78830372682721e-05,
      "loss": 1.0595,
      "step": 705
    },
    {
      "epoch": 0.8669108669108669,
      "grad_norm": 0.1292362091327554,
      "learning_rate": 5.300668149789417e-05,
      "loss": 1.0628,
      "step": 710
    },
    {
      "epoch": 0.873015873015873,
      "grad_norm": 0.11135921961101819,
      "learning_rate": 4.833336935772442e-05,
      "loss": 1.0532,
      "step": 715
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.1040842333058969,
      "learning_rate": 4.386522366755169e-05,
      "loss": 1.0548,
      "step": 720
    },
    {
      "epoch": 0.8852258852258852,
      "grad_norm": 0.11114144762934852,
      "learning_rate": 3.960427405172079e-05,
      "loss": 1.0602,
      "step": 725
    },
    {
      "epoch": 0.8913308913308914,
      "grad_norm": 0.10851073660711448,
      "learning_rate": 3.5552456017189926e-05,
      "loss": 1.0615,
      "step": 730
    },
    {
      "epoch": 0.8974358974358975,
      "grad_norm": 0.1046722141969564,
      "learning_rate": 3.171161007433937e-05,
      "loss": 1.0542,
      "step": 735
    },
    {
      "epoch": 0.9035409035409036,
      "grad_norm": 0.11710428457476799,
      "learning_rate": 2.808348090093277e-05,
      "loss": 1.0671,
      "step": 740
    },
    {
      "epoch": 0.9096459096459096,
      "grad_norm": 0.11332319869107195,
      "learning_rate": 2.466971654960931e-05,
      "loss": 1.0611,
      "step": 745
    },
    {
      "epoch": 0.9157509157509157,
      "grad_norm": 0.10374082585506901,
      "learning_rate": 2.147186769926712e-05,
      "loss": 1.0498,
      "step": 750
    },
    {
      "epoch": 0.9218559218559218,
      "grad_norm": 0.11492942958557875,
      "learning_rate": 1.8491386950677812e-05,
      "loss": 1.0571,
      "step": 755
    },
    {
      "epoch": 0.927960927960928,
      "grad_norm": 0.11080015958091284,
      "learning_rate": 1.572962816665302e-05,
      "loss": 1.0558,
      "step": 760
    },
    {
      "epoch": 0.9340659340659341,
      "grad_norm": 0.117953492017612,
      "learning_rate": 1.3187845857061508e-05,
      "loss": 1.0462,
      "step": 765
    },
    {
      "epoch": 0.9401709401709402,
      "grad_norm": 0.11023247026103157,
      "learning_rate": 1.0867194608976228e-05,
      "loss": 1.0731,
      "step": 770
    },
    {
      "epoch": 0.9462759462759462,
      "grad_norm": 0.11752064314919194,
      "learning_rate": 8.768728562211947e-06,
      "loss": 1.058,
      "step": 775
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.10584711974817956,
      "learning_rate": 6.893400930488569e-06,
      "loss": 1.0638,
      "step": 780
    },
    {
      "epoch": 0.9584859584859585,
      "grad_norm": 0.12776920092469246,
      "learning_rate": 5.242063568441313e-06,
      "loss": 1.0633,
      "step": 785
    },
    {
      "epoch": 0.9645909645909646,
      "grad_norm": 0.10955827972683771,
      "learning_rate": 3.815466584670746e-06,
      "loss": 1.0533,
      "step": 790
    },
    {
      "epoch": 0.9706959706959707,
      "grad_norm": 0.11036039903845696,
      "learning_rate": 2.6142580010117823e-06,
      "loss": 1.0576,
      "step": 795
    },
    {
      "epoch": 0.9768009768009768,
      "grad_norm": 0.10123727837311189,
      "learning_rate": 1.6389834581739814e-06,
      "loss": 1.0609,
      "step": 800
    },
    {
      "epoch": 0.9829059829059829,
      "grad_norm": 0.11176827609005396,
      "learning_rate": 8.900859678879769e-07,
      "loss": 1.0575,
      "step": 805
    },
    {
      "epoch": 0.989010989010989,
      "grad_norm": 0.10493373702681778,
      "learning_rate": 3.6790571167061305e-07,
      "loss": 1.0604,
      "step": 810
    },
    {
      "epoch": 0.9951159951159951,
      "grad_norm": 0.11496441250413074,
      "learning_rate": 7.26798862996092e-08,
      "loss": 1.0534,
      "step": 815
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.4183021783828735,
      "eval_runtime": 107.3754,
      "eval_samples_per_second": 195.501,
      "eval_steps_per_second": 6.109,
      "step": 819
    },
    {
      "epoch": 1.0,
      "step": 819,
      "total_flos": 45247522406400.0,
      "train_loss": 1.1312509008088536,
      "train_runtime": 1838.4003,
      "train_samples_per_second": 57.016,
      "train_steps_per_second": 0.445
    }
  ],
  "logging_steps": 5,
  "max_steps": 819,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 45247522406400.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}