{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9993522716757657, |
|
"eval_steps": 500, |
|
"global_step": 1350, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0007402609419820486, |
|
"grad_norm": 13.5625, |
|
"learning_rate": 1.4814814814814817e-07, |
|
"loss": 2.7636, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0037013047099102433, |
|
"grad_norm": 13.5, |
|
"learning_rate": 7.407407407407407e-07, |
|
"loss": 2.7044, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.007402609419820487, |
|
"grad_norm": 12.0625, |
|
"learning_rate": 1.4814814814814815e-06, |
|
"loss": 2.6933, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01110391412973073, |
|
"grad_norm": 9.75, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 2.6397, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.014805218839640973, |
|
"grad_norm": 9.25, |
|
"learning_rate": 2.962962962962963e-06, |
|
"loss": 2.5469, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.018506523549551217, |
|
"grad_norm": 4.3125, |
|
"learning_rate": 3.7037037037037037e-06, |
|
"loss": 2.4799, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02220782825946146, |
|
"grad_norm": 3.4375, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 2.4496, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.025909132969371702, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 5.185185185185185e-06, |
|
"loss": 2.3571, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.029610437679281947, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 5.925925925925926e-06, |
|
"loss": 2.3253, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03331174238919219, |
|
"grad_norm": 2.75, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.3086, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.037013047099102435, |
|
"grad_norm": 2.6875, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 2.2763, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04071435180901268, |
|
"grad_norm": 2.375, |
|
"learning_rate": 8.148148148148148e-06, |
|
"loss": 2.2683, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04441565651892292, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 2.2694, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04811696122883316, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9.62962962962963e-06, |
|
"loss": 2.2582, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.051818265938743405, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 1.037037037037037e-05, |
|
"loss": 2.2606, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05551957064865365, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.1111111111111113e-05, |
|
"loss": 2.2405, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05922087535856389, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.1851851851851852e-05, |
|
"loss": 2.2496, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06292218006847414, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.2592592592592593e-05, |
|
"loss": 2.2547, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06662348477838438, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 2.2488, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07032478948829463, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.4074074074074075e-05, |
|
"loss": 2.2479, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07402609419820487, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.4814814814814815e-05, |
|
"loss": 2.2405, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07772739890811511, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.555555555555556e-05, |
|
"loss": 2.222, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.08142870361802536, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.6296296296296297e-05, |
|
"loss": 2.2254, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0851300083279356, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 1.7037037037037038e-05, |
|
"loss": 2.2505, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08883131303784585, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.7777777777777777e-05, |
|
"loss": 2.2367, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09253261774775609, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.851851851851852e-05, |
|
"loss": 2.2275, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09623392245766632, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.925925925925926e-05, |
|
"loss": 2.2258, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09993522716757657, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 2e-05, |
|
"loss": 2.2103, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.10363653187748681, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.9999164298554375e-05, |
|
"loss": 2.2442, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10733783658739705, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 1.9996657333896875e-05, |
|
"loss": 2.2542, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1110391412973073, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 1.9992479525042305e-05, |
|
"loss": 2.2434, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11474044600721754, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.9986631570270835e-05, |
|
"loss": 2.2195, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.11844175071712779, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.9979114447011323e-05, |
|
"loss": 2.2221, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12214305542703803, |
|
"grad_norm": 2.0, |
|
"learning_rate": 1.996992941167792e-05, |
|
"loss": 2.2276, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.12584436013694827, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.9959077999460094e-05, |
|
"loss": 2.2112, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1295456648468585, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.9946562024066018e-05, |
|
"loss": 2.2086, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13324696955676876, |
|
"grad_norm": 1.9765625, |
|
"learning_rate": 1.9932383577419432e-05, |
|
"loss": 2.2095, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.136948274266679, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 1.991654502931001e-05, |
|
"loss": 2.2194, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.14064957897658925, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 1.9899049026997272e-05, |
|
"loss": 2.229, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.14435088368649948, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 1.9879898494768093e-05, |
|
"loss": 2.2252, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.14805218839640974, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 1.9859096633447965e-05, |
|
"loss": 2.2168, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.15175349310631997, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.9836646919866012e-05, |
|
"loss": 2.2246, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.15545479781623023, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 1.9812553106273848e-05, |
|
"loss": 2.2151, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15915610252614046, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.9786819219718443e-05, |
|
"loss": 2.2176, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.16285740723605072, |
|
"grad_norm": 1.9453125, |
|
"learning_rate": 1.9759449561369036e-05, |
|
"loss": 2.2279, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.16655871194596095, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.973044870579824e-05, |
|
"loss": 2.2113, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1702600166558712, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.9699821500217436e-05, |
|
"loss": 2.2418, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.17396132136578144, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 1.9667573063666622e-05, |
|
"loss": 2.2206, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1776626260756917, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 1.9633708786158803e-05, |
|
"loss": 2.2162, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18136393078560192, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.959823432777912e-05, |
|
"loss": 2.2015, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.18506523549551218, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.95611556177388e-05, |
|
"loss": 2.2095, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1887665402054224, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.9522478853384154e-05, |
|
"loss": 2.2245, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.19246784491533264, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 1.9482210499160767e-05, |
|
"loss": 2.2179, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1961691496252429, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.9440357285533e-05, |
|
"loss": 2.2132, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.19987045433515313, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.9396926207859085e-05, |
|
"loss": 2.2024, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2035717590450634, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.93519245252219e-05, |
|
"loss": 2.1749, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.20727306375497362, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 1.9305359759215686e-05, |
|
"loss": 2.2137, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.21097436846488388, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 1.9257239692688907e-05, |
|
"loss": 2.2294, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2146756731747941, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.9207572368443386e-05, |
|
"loss": 2.2023, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.21837697788470437, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.9156366087890062e-05, |
|
"loss": 2.2113, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.2220782825946146, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 1.9103629409661468e-05, |
|
"loss": 2.2245, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.22577958730452485, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.9049371148181253e-05, |
|
"loss": 2.2102, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.22948089201443508, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 1.8993600372190933e-05, |
|
"loss": 2.2243, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.23318219672434534, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.8936326403234125e-05, |
|
"loss": 2.2009, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.23688350143425557, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.8877558814098564e-05, |
|
"loss": 2.2078, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.24058480614416583, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.881730742721608e-05, |
|
"loss": 2.2031, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.24428611085407606, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.8755582313020912e-05, |
|
"loss": 2.1597, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.24798741556398632, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.8692393788266477e-05, |
|
"loss": 2.1922, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.25168872027389655, |
|
"grad_norm": 1.8984375, |
|
"learning_rate": 1.8627752414301087e-05, |
|
"loss": 2.1883, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.2553900249838068, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.8561668995302668e-05, |
|
"loss": 2.2097, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.259091329693717, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.8494154576472976e-05, |
|
"loss": 2.2106, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.26279263440362727, |
|
"grad_norm": 1.875, |
|
"learning_rate": 1.8425220442191496e-05, |
|
"loss": 2.2035, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.2664939391135375, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.8354878114129368e-05, |
|
"loss": 2.1937, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.2701952438234478, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.8283139349323632e-05, |
|
"loss": 2.1828, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.273896548533358, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.8210016138212186e-05, |
|
"loss": 2.189, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.27759785324326824, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.8135520702629677e-05, |
|
"loss": 2.2025, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2812991579531785, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.8059665493764745e-05, |
|
"loss": 2.1967, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.28500046266308876, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.7982463190078928e-05, |
|
"loss": 2.1726, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.28870176737299896, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.7903926695187595e-05, |
|
"loss": 2.1758, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.2924030720829092, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.78240691357032e-05, |
|
"loss": 2.186, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.2961043767928195, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.7742903859041324e-05, |
|
"loss": 2.1866, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.29980568150272974, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.766044443118978e-05, |
|
"loss": 2.1996, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.30350698621263994, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.757670463444118e-05, |
|
"loss": 2.1657, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3072082909225502, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.749169846508936e-05, |
|
"loss": 2.1938, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.31090959563246046, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.740544013109005e-05, |
|
"loss": 2.1802, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.31461090034237066, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.7317944049686125e-05, |
|
"loss": 2.1961, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3183122050522809, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.722922484499793e-05, |
|
"loss": 2.1849, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3220135097621912, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.7139297345578992e-05, |
|
"loss": 2.2075, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.32571481447210143, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.7048176581937562e-05, |
|
"loss": 2.1531, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.32941611918201164, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.6955877784024418e-05, |
|
"loss": 2.1763, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.3331174238919219, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.686241637868734e-05, |
|
"loss": 2.2074, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.33681872860183215, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.676780798709262e-05, |
|
"loss": 2.1793, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.3405200333117424, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.6672068422114195e-05, |
|
"loss": 2.1853, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.3442213380216526, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.657521368569064e-05, |
|
"loss": 2.1804, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.34792264273156287, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.647725996615059e-05, |
|
"loss": 2.1836, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.35162394744147313, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 1.637822363550706e-05, |
|
"loss": 2.1714, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3553252521513834, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 1.627812124672099e-05, |
|
"loss": 2.2257, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3590265568612936, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.6176969530934573e-05, |
|
"loss": 2.1983, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.36272786157120385, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.6074785394674835e-05, |
|
"loss": 2.1925, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.3664291662811141, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.5971585917027864e-05, |
|
"loss": 2.1606, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.37013047099102436, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.586738834678418e-05, |
|
"loss": 2.1738, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.37383177570093457, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.5762210099555804e-05, |
|
"loss": 2.17, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.3775330804108448, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.5656068754865388e-05, |
|
"loss": 2.1759, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.3812343851207551, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.554898205320797e-05, |
|
"loss": 2.2016, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.3849356898306653, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.5440967893085827e-05, |
|
"loss": 2.1711, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.38863699454057554, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.5332044328016916e-05, |
|
"loss": 2.1809, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3923382992504858, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.5222229563517385e-05, |
|
"loss": 2.2018, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.39603960396039606, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.5111541954058733e-05, |
|
"loss": 2.1762, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.39974090867030626, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 2.1955, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4034422133802165, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.4887622344495643e-05, |
|
"loss": 2.1855, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.4071435180901268, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.4774427770379492e-05, |
|
"loss": 2.174, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.41084482280003704, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.4660435197025391e-05, |
|
"loss": 2.1727, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.41454612750994724, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.4545663677185007e-05, |
|
"loss": 2.1715, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4182474322198575, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.4430132393803353e-05, |
|
"loss": 2.1893, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.42194873692976775, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.4313860656812537e-05, |
|
"loss": 2.1734, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.425650041639678, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.4196867899904292e-05, |
|
"loss": 2.1759, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4293513463495882, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.4079173677281836e-05, |
|
"loss": 2.1615, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4330526510594985, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.396079766039157e-05, |
|
"loss": 2.1769, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.43675395576940873, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.3841759634635177e-05, |
|
"loss": 2.1867, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.44045526047931893, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.3722079496062702e-05, |
|
"loss": 2.1836, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.4441565651892292, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.3601777248047105e-05, |
|
"loss": 2.1803, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.44785786989913945, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.3480872997940906e-05, |
|
"loss": 2.1667, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.4515591746090497, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.3359386953715423e-05, |
|
"loss": 2.1644, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.4552604793189599, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.3237339420583213e-05, |
|
"loss": 2.1769, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.45896178402887017, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.3114750797604248e-05, |
|
"loss": 2.1611, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.4626630887387804, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.2991641574276419e-05, |
|
"loss": 2.1676, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.4663643934486907, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 1.2868032327110904e-05, |
|
"loss": 2.2038, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.4700656981586009, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.2743943716193017e-05, |
|
"loss": 2.2025, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.47376700286851114, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.261939648172906e-05, |
|
"loss": 2.1784, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.4774683075784214, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.2494411440579814e-05, |
|
"loss": 2.1805, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.48116961228833166, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.2369009482781191e-05, |
|
"loss": 2.1945, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.48487091699824186, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.2243211568052678e-05, |
|
"loss": 2.1775, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.4885722217081521, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.211703872229411e-05, |
|
"loss": 2.1832, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.4922735264180624, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.1990512034071407e-05, |
|
"loss": 2.1899, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.49597483112797264, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.1863652651091824e-05, |
|
"loss": 2.1675, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.49967613583788284, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.1736481776669307e-05, |
|
"loss": 2.1881, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5033774405477931, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.1609020666180574e-05, |
|
"loss": 2.1825, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.5070787452577034, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.1481290623512491e-05, |
|
"loss": 2.1875, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.5107800499676136, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.1353312997501313e-05, |
|
"loss": 2.1693, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.5144813546775239, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.1225109178364456e-05, |
|
"loss": 2.169, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.518182659387434, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.1096700594125318e-05, |
|
"loss": 2.1714, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5218839640973443, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.0968108707031792e-05, |
|
"loss": 2.179, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.5255852688072545, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.0839355009969068e-05, |
|
"loss": 2.153, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.5292865735171648, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.0710461022867303e-05, |
|
"loss": 2.1683, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.532987878227075, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.0581448289104759e-05, |
|
"loss": 2.1459, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.5366891829369853, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.0452338371907065e-05, |
|
"loss": 2.1666, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.5403904876468956, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.0323152850743107e-05, |
|
"loss": 2.173, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.5440917923568058, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.0193913317718245e-05, |
|
"loss": 2.1745, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.547793097066716, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.0064641373965394e-05, |
|
"loss": 2.1602, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.5514944017766262, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 9.935358626034607e-06, |
|
"loss": 2.1608, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.5551957064865365, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 9.806086682281759e-06, |
|
"loss": 2.1906, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5588970111964467, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 9.676847149256894e-06, |
|
"loss": 2.1502, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.562598315906357, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 9.547661628092938e-06, |
|
"loss": 2.1648, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.5662996206162673, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 9.418551710895243e-06, |
|
"loss": 2.1866, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.5700009253261775, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 9.289538977132702e-06, |
|
"loss": 2.1776, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.5737022300360877, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 9.160644990030932e-06, |
|
"loss": 2.1665, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5774035347459979, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 9.03189129296821e-06, |
|
"loss": 2.1891, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.5811048394559082, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 8.903299405874685e-06, |
|
"loss": 2.1894, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.5848061441658184, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 8.774890821635548e-06, |
|
"loss": 2.167, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.5885074488757287, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 8.646687002498692e-06, |
|
"loss": 2.1759, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.592208753585639, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 8.518709376487515e-06, |
|
"loss": 2.155, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5959100582955492, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 8.390979333819427e-06, |
|
"loss": 2.1734, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.5996113630054595, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 8.263518223330698e-06, |
|
"loss": 2.1619, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.6033126677153696, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 8.13634734890818e-06, |
|
"loss": 2.1509, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.6070139724252799, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 8.009487965928597e-06, |
|
"loss": 2.1662, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.6107152771351901, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 7.882961277705897e-06, |
|
"loss": 2.1315, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.6144165818451004, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 7.756788431947327e-06, |
|
"loss": 2.1737, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.6181178865550107, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 7.630990517218809e-06, |
|
"loss": 2.1673, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.6218191912649209, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 7.505588559420188e-06, |
|
"loss": 2.156, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.6255204959748312, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 7.380603518270942e-06, |
|
"loss": 2.1576, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.6292218006847413, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 7.256056283806987e-06, |
|
"loss": 2.1715, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.6329231053946516, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 7.131967672889101e-06, |
|
"loss": 2.1872, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.6366244101045618, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 7.008358425723586e-06, |
|
"loss": 2.1596, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.6403257148144721, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 6.885249202395754e-06, |
|
"loss": 2.1546, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.6440270195243823, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 6.762660579416791e-06, |
|
"loss": 2.1499, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.6477283242342926, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 6.640613046284581e-06, |
|
"loss": 2.1816, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.6514296289442029, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 6.519127002059096e-06, |
|
"loss": 2.1675, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.6551309336541131, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 6.3982227519528986e-06, |
|
"loss": 2.1819, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.6588322383640233, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 6.277920503937303e-06, |
|
"loss": 2.1628, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.6625335430739335, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 6.158240365364823e-06, |
|
"loss": 2.1595, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.6662348477838438, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 6.039202339608432e-06, |
|
"loss": 2.1604, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.669936152493754, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.920826322718165e-06, |
|
"loss": 2.1498, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.6736374572036643, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 5.80313210009571e-06, |
|
"loss": 2.1612, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.6773387619135746, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.686139343187468e-06, |
|
"loss": 2.1668, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.6810400666234848, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.569867606196652e-06, |
|
"loss": 2.1952, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.6847413713333951, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 5.454336322814995e-06, |
|
"loss": 2.1699, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.6884426760433052, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 5.339564802974615e-06, |
|
"loss": 2.176, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.6921439807532155, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 5.2255722296205104e-06, |
|
"loss": 2.1927, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.6958452854631257, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 5.112377655504359e-06, |
|
"loss": 2.1742, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.699546590173036, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 2.1835, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.7032478948829463, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 4.888458045941269e-06, |
|
"loss": 2.1855, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.7069491995928565, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 4.7777704364826175e-06, |
|
"loss": 2.1922, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.7106505043027668, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 4.66795567198309e-06, |
|
"loss": 2.1563, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.7143518090126769, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 4.559032106914173e-06, |
|
"loss": 2.1522, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.7180531137225872, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 4.4510179467920325e-06, |
|
"loss": 2.175, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.7217544184324974, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 4.343931245134616e-06, |
|
"loss": 2.1506, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.7254557231424077, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 4.237789900444197e-06, |
|
"loss": 2.1555, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.729157027852318, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 4.132611653215822e-06, |
|
"loss": 2.1142, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.7328583325622282, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 4.028414082972141e-06, |
|
"loss": 2.1617, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.7365596372721385, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 3.925214605325164e-06, |
|
"loss": 2.1854, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.7402609419820487, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 3.823030469065431e-06, |
|
"loss": 2.1767, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.7439622466919589, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 3.7218787532790167e-06, |
|
"loss": 2.1667, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.7476635514018691, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 3.6217763644929393e-06, |
|
"loss": 2.1532, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.7513648561117794, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 3.522740033849411e-06, |
|
"loss": 2.1617, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.7550661608216896, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 3.424786314309365e-06, |
|
"loss": 2.1549, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.7587674655315999, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 3.3279315778858034e-06, |
|
"loss": 2.1501, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.7624687702415102, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 3.2321920129073815e-06, |
|
"loss": 2.164, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.7661700749514204, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 3.1375836213126653e-06, |
|
"loss": 2.1473, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.7698713796613306, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 3.04412221597558e-06, |
|
"loss": 2.1597, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.7735726843712408, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 2.9518234180624393e-06, |
|
"loss": 2.165, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.7772739890811511, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.8607026544210115e-06, |
|
"loss": 2.1634, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7809752937910613, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 2.770775155002071e-06, |
|
"loss": 2.1559, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.7846765985009716, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 2.6820559503138797e-06, |
|
"loss": 2.1638, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.7883779032108819, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 2.594559868909956e-06, |
|
"loss": 2.1604, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.7920792079207921, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 2.50830153491064e-06, |
|
"loss": 2.1602, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.7957805126307024, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 2.423295365558821e-06, |
|
"loss": 2.1588, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.7994818173406125, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 2.339555568810221e-06, |
|
"loss": 2.1525, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.8031831220505228, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 2.2570961409586756e-06, |
|
"loss": 2.1622, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.806884426760433, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.1759308642968024e-06, |
|
"loss": 2.1544, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.8105857314703433, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 2.0960733048124082e-06, |
|
"loss": 2.158, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.8142870361802536, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 2.01753680992107e-06, |
|
"loss": 2.1827, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.8179883408901638, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.9403345062352574e-06, |
|
"loss": 2.1579, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.8216896456000741, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.8644792973703252e-06, |
|
"loss": 2.1601, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.8253909503099842, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 1.7899838617878163e-06, |
|
"loss": 2.168, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.8290922550198945, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 1.7168606506763696e-06, |
|
"loss": 2.1706, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.8327935597298047, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.6451218858706374e-06, |
|
"loss": 2.175, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.836494864439715, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.5747795578085046e-06, |
|
"loss": 2.1803, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.8401961691496252, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.505845423527027e-06, |
|
"loss": 2.1543, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.8438974738595355, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.4383310046973365e-06, |
|
"loss": 2.1556, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.8475987785694458, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.372247585698916e-06, |
|
"loss": 2.1772, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.851300083279356, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.307606211733522e-06, |
|
"loss": 2.1542, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.8550013879892662, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 1.2444176869790925e-06, |
|
"loss": 2.1635, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.8587026926991764, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.18269257278392e-06, |
|
"loss": 2.144, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.8624039974090867, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 1.1224411859014417e-06, |
|
"loss": 2.153, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.866105302118997, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.0636735967658785e-06, |
|
"loss": 2.1537, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.8698066068289072, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.0063996278090704e-06, |
|
"loss": 2.1913, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.8735079115388175, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 9.506288518187468e-07, |
|
"loss": 2.1783, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.8772092162487277, |
|
"grad_norm": 1.75, |
|
"learning_rate": 8.963705903385344e-07, |
|
"loss": 2.1502, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.8809105209586379, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 8.436339121099413e-07, |
|
"loss": 2.1642, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.8846118256685481, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 7.924276315566171e-07, |
|
"loss": 2.1625, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.8883131303784584, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 7.427603073110967e-07, |
|
"loss": 2.1741, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8920144350883686, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 6.946402407843156e-07, |
|
"loss": 2.1467, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.8957157397982789, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 6.480754747781037e-07, |
|
"loss": 2.1597, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.8994170445081892, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 6.030737921409169e-07, |
|
"loss": 2.1653, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.9031183492180994, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 5.596427144670002e-07, |
|
"loss": 2.173, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.9068196539280097, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 5.177895008392353e-07, |
|
"loss": 2.1844, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.9105209586379198, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 4.775211466158469e-07, |
|
"loss": 2.1385, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.9142222633478301, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 4.388443822612043e-07, |
|
"loss": 2.1521, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.9179235680577403, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 4.017656722208807e-07, |
|
"loss": 2.1692, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.9216248727676506, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 3.662912138411967e-07, |
|
"loss": 2.1767, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.9253261774775609, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 3.3242693633337986e-07, |
|
"loss": 2.1517, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.9290274821874711, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 3.001784997825652e-07, |
|
"loss": 2.1849, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.9327287868973814, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 2.6955129420176193e-07, |
|
"loss": 2.1564, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.9364300916072915, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 2.405504386309643e-07, |
|
"loss": 2.1831, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.9401313963172018, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 2.1318078028155886e-07, |
|
"loss": 2.1436, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.943832701027112, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 1.874468937261531e-07, |
|
"loss": 2.159, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.9475340057370223, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.6335308013398888e-07, |
|
"loss": 2.1677, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.9512353104469325, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.409033665520354e-07, |
|
"loss": 2.1872, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.9549366151568428, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 1.201015052319099e-07, |
|
"loss": 2.1408, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.9586379198667531, |
|
"grad_norm": 1.75, |
|
"learning_rate": 1.0095097300273026e-07, |
|
"loss": 2.1421, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.9623392245766633, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 8.345497068998897e-08, |
|
"loss": 2.1545, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.9660405292865735, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 6.761642258056977e-08, |
|
"loss": 2.1459, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.9697418339964837, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 5.3437975933985366e-08, |
|
"loss": 2.1508, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.973443138706394, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 4.0922000539906914e-08, |
|
"loss": 2.1533, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.9771444434163042, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 3.0070588322079765e-08, |
|
"loss": 2.1527, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.9808457481262145, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 2.088555298867978e-08, |
|
"loss": 2.1634, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.9845470528361248, |
|
"grad_norm": 1.75, |
|
"learning_rate": 1.3368429729168075e-08, |
|
"loss": 2.1714, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.988248357546035, |
|
"grad_norm": 1.75, |
|
"learning_rate": 7.520474957699586e-09, |
|
"loss": 2.1449, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.9919496622559453, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 3.3426661031255024e-09, |
|
"loss": 2.1756, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.9956509669658554, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 8.357014456272794e-10, |
|
"loss": 2.1534, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.9993522716757657, |
|
"grad_norm": 1.796875, |
|
"learning_rate": 0.0, |
|
"loss": 2.1688, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9993522716757657, |
|
"eval_loss": 2.16412353515625, |
|
"eval_runtime": 221.9604, |
|
"eval_samples_per_second": 5.379, |
|
"eval_steps_per_second": 2.69, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9993522716757657, |
|
"step": 1350, |
|
"total_flos": 5.373491620442276e+17, |
|
"train_loss": 2.19371005181913, |
|
"train_runtime": 17533.5186, |
|
"train_samples_per_second": 1.233, |
|
"train_steps_per_second": 0.077 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1350, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.373491620442276e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |