{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999627004848937,
  "eval_steps": 500,
  "global_step": 670,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.337890625,
      "learning_rate": 9.477611940298507e-07,
      "loss": 2.637,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.349609375,
      "learning_rate": 4.738805970149253e-06,
      "loss": 2.6461,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.32421875,
      "learning_rate": 9.477611940298506e-06,
      "loss": 2.6439,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.35546875,
      "learning_rate": 1.4216417910447761e-05,
      "loss": 2.642,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.365234375,
      "learning_rate": 1.895522388059701e-05,
      "loss": 2.6207,
      "step": 20
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.353515625,
      "learning_rate": 2.369402985074627e-05,
      "loss": 2.6062,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.291015625,
      "learning_rate": 2.8432835820895522e-05,
      "loss": 2.5733,
      "step": 30
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.255859375,
      "learning_rate": 3.317164179104477e-05,
      "loss": 2.531,
      "step": 35
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.259765625,
      "learning_rate": 3.791044776119402e-05,
      "loss": 2.495,
      "step": 40
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.177734375,
      "learning_rate": 4.2649253731343286e-05,
      "loss": 2.4618,
      "step": 45
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1494140625,
      "learning_rate": 4.738805970149254e-05,
      "loss": 2.4374,
      "step": 50
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1513671875,
      "learning_rate": 5.2126865671641794e-05,
      "loss": 2.4205,
      "step": 55
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.15234375,
      "learning_rate": 5.6865671641791044e-05,
      "loss": 2.3846,
      "step": 60
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.1298828125,
      "learning_rate": 6.16044776119403e-05,
      "loss": 2.3675,
      "step": 65
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.10888671875,
      "learning_rate": 6.349612195786573e-05,
      "loss": 2.3648,
      "step": 70
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.138671875,
      "learning_rate": 6.3472426242142e-05,
      "loss": 2.3394,
      "step": 75
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.09912109375,
      "learning_rate": 6.342720533773385e-05,
      "loss": 2.3241,
      "step": 80
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.083984375,
      "learning_rate": 6.336048992919527e-05,
      "loss": 2.3123,
      "step": 85
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.09228515625,
      "learning_rate": 6.327232528613285e-05,
      "loss": 2.3201,
      "step": 90
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.0908203125,
      "learning_rate": 6.316277123248829e-05,
      "loss": 2.2999,
      "step": 95
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.07958984375,
      "learning_rate": 6.303190210594489e-05,
      "loss": 2.3114,
      "step": 100
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.0712890625,
      "learning_rate": 6.287980670748592e-05,
      "loss": 2.3002,
      "step": 105
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.072265625,
      "learning_rate": 6.270658824113884e-05,
      "loss": 2.2732,
      "step": 110
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.07275390625,
      "learning_rate": 6.251236424394651e-05,
      "loss": 2.2867,
      "step": 115
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.0732421875,
      "learning_rate": 6.229726650621257e-05,
      "loss": 2.2718,
      "step": 120
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.0751953125,
      "learning_rate": 6.20614409820754e-05,
      "loss": 2.274,
      "step": 125
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.072265625,
      "learning_rate": 6.180504769047129e-05,
      "loss": 2.2688,
      "step": 130
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.0830078125,
      "learning_rate": 6.152826060655387e-05,
      "loss": 2.272,
      "step": 135
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.07861328125,
      "learning_rate": 6.123126754364366e-05,
      "loss": 2.2807,
      "step": 140
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.072265625,
      "learning_rate": 6.091427002578765e-05,
      "loss": 2.2519,
      "step": 145
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.0732421875,
      "learning_rate": 6.057748315101562e-05,
      "loss": 2.2648,
      "step": 150
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.07177734375,
      "learning_rate": 6.0221135445385774e-05,
      "loss": 2.2739,
      "step": 155
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.07470703125,
      "learning_rate": 5.984546870791885e-05,
      "loss": 2.2764,
      "step": 160
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.0751953125,
      "learning_rate": 5.945073784652589e-05,
      "loss": 2.2514,
      "step": 165
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.078125,
      "learning_rate": 5.9037210705040984e-05,
      "loss": 2.2544,
      "step": 170
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.07470703125,
      "learning_rate": 5.860516788147634e-05,
      "loss": 2.2479,
      "step": 175
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.07763671875,
      "learning_rate": 5.815490253762313e-05,
      "loss": 2.253,
      "step": 180
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.080078125,
      "learning_rate": 5.7686720200127084e-05,
      "loss": 2.2528,
      "step": 185
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.0791015625,
      "learning_rate": 5.7200938553174043e-05,
      "loss": 2.253,
      "step": 190
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.078125,
      "learning_rate": 5.669788722292595e-05,
      "loss": 2.2571,
      "step": 195
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.0791015625,
      "learning_rate": 5.617790755385372e-05,
      "loss": 2.2666,
      "step": 200
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.07958984375,
      "learning_rate": 5.5641352377118605e-05,
      "loss": 2.2601,
      "step": 205
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.07861328125,
      "learning_rate": 5.508858577115933e-05,
      "loss": 2.2471,
      "step": 210
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.080078125,
      "learning_rate": 5.451998281464741e-05,
      "loss": 2.2523,
      "step": 215
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.07958984375,
      "learning_rate": 5.393592933197822e-05,
      "loss": 2.244,
      "step": 220
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.0791015625,
      "learning_rate": 5.333682163147071e-05,
      "loss": 2.242,
      "step": 225
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.0771484375,
      "learning_rate": 5.2723066236453086e-05,
      "loss": 2.2519,
      "step": 230
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.08349609375,
      "learning_rate": 5.209507960941733e-05,
      "loss": 2.2521,
      "step": 235
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.0830078125,
      "learning_rate": 5.145328786942933e-05,
      "loss": 2.2525,
      "step": 240
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.07958984375,
      "learning_rate": 5.0798126502986685e-05,
      "loss": 2.2423,
      "step": 245
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.080078125,
      "learning_rate": 5.013004006852019e-05,
      "loss": 2.2444,
      "step": 250
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.08154296875,
      "learning_rate": 4.944948189473962e-05,
      "loss": 2.2562,
      "step": 255
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.08203125,
      "learning_rate": 4.875691377302846e-05,
      "loss": 2.2443,
      "step": 260
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.0859375,
      "learning_rate": 4.805280564409623e-05,
      "loss": 2.2492,
      "step": 265
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.08154296875,
      "learning_rate": 4.7337635279101233e-05,
      "loss": 2.2402,
      "step": 270
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.08203125,
      "learning_rate": 4.661188795545985e-05,
      "loss": 2.2402,
      "step": 275
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.0869140625,
      "learning_rate": 4.5876056127562524e-05,
      "loss": 2.2464,
      "step": 280
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.0859375,
      "learning_rate": 4.5130639092619825e-05,
      "loss": 2.2518,
      "step": 285
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.083984375,
      "learning_rate": 4.437614265186536e-05,
      "loss": 2.2491,
      "step": 290
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.08154296875,
      "learning_rate": 4.361307876734529e-05,
      "loss": 2.2474,
      "step": 295
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.083984375,
      "learning_rate": 4.2841965214527606e-05,
      "loss": 2.2634,
      "step": 300
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.0849609375,
      "learning_rate": 4.206332523096655e-05,
      "loss": 2.2433,
      "step": 305
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.083984375,
      "learning_rate": 4.127768716126082e-05,
      "loss": 2.2468,
      "step": 310
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.0869140625,
      "learning_rate": 4.0485584098546456e-05,
      "loss": 2.2399,
      "step": 315
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.08544921875,
      "learning_rate": 3.968755352276755e-05,
      "loss": 2.2431,
      "step": 320
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.08349609375,
      "learning_rate": 3.888413693597025e-05,
      "loss": 2.2309,
      "step": 325
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.08740234375,
      "learning_rate": 3.8075879494867705e-05,
      "loss": 2.2298,
      "step": 330
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.08349609375,
      "learning_rate": 3.726332964092504e-05,
      "loss": 2.2443,
      "step": 335
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.09033203125,
      "learning_rate": 3.644703872821547e-05,
      "loss": 2.2481,
      "step": 340
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.08642578125,
      "learning_rate": 3.5627560649300175e-05,
      "loss": 2.2375,
      "step": 345
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.087890625,
      "learning_rate": 3.4805451459385544e-05,
      "loss": 2.2368,
      "step": 350
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.0830078125,
      "learning_rate": 3.398126899901305e-05,
      "loss": 2.2359,
      "step": 355
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.0830078125,
      "learning_rate": 3.31555725155377e-05,
      "loss": 2.2472,
      "step": 360
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.0849609375,
      "learning_rate": 3.232892228365181e-05,
      "loss": 2.2511,
      "step": 365
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.0849609375,
      "learning_rate": 3.15018792252118e-05,
      "loss": 2.2273,
      "step": 370
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.08740234375,
      "learning_rate": 3.067500452862575e-05,
      "loss": 2.2391,
      "step": 375
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.08447265625,
      "learning_rate": 2.984885926806012e-05,
      "loss": 2.2424,
      "step": 380
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.0830078125,
      "learning_rate": 2.9024004022724027e-05,
      "loss": 2.2435,
      "step": 385
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.087890625,
      "learning_rate": 2.8200998496489373e-05,
      "loss": 2.223,
      "step": 390
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.08447265625,
      "learning_rate": 2.7380401138104845e-05,
      "loss": 2.2235,
      "step": 395
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.08984375,
      "learning_rate": 2.656276876226166e-05,
      "loss": 2.2379,
      "step": 400
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.0869140625,
      "learning_rate": 2.5748656171768127e-05,
      "loss": 2.2232,
      "step": 405
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.0888671875,
      "learning_rate": 2.4938615781089216e-05,
      "loss": 2.2492,
      "step": 410
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.08447265625,
      "learning_rate": 2.413319724150689e-05,
      "loss": 2.2429,
      "step": 415
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.0849609375,
      "learning_rate": 2.3332947068155364e-05,
      "loss": 2.2486,
      "step": 420
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.08349609375,
      "learning_rate": 2.2538408269184268e-05,
      "loss": 2.2415,
      "step": 425
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.0869140625,
      "learning_rate": 2.1750119977301616e-05,
      "loss": 2.2568,
      "step": 430
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.0859375,
      "learning_rate": 2.096861708394641e-05,
      "loss": 2.235,
      "step": 435
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.0849609375,
      "learning_rate": 2.0194429876339054e-05,
      "loss": 2.2364,
      "step": 440
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.0859375,
      "learning_rate": 1.9428083677656066e-05,
      "loss": 2.2439,
      "step": 445
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.0859375,
      "learning_rate": 1.8670098490573132e-05,
      "loss": 2.238,
      "step": 450
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.0869140625,
      "learning_rate": 1.792098864441825e-05,
      "loss": 2.2325,
      "step": 455
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.08544921875,
      "learning_rate": 1.7181262446174615e-05,
      "loss": 2.2342,
      "step": 460
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.08740234375,
      "learning_rate": 1.6451421835570044e-05,
      "loss": 2.2262,
      "step": 465
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.08642578125,
      "learning_rate": 1.5731962044486608e-05,
      "loss": 2.2577,
      "step": 470
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.08544921875,
      "learning_rate": 1.5023371260922157e-05,
      "loss": 2.2481,
      "step": 475
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.083984375,
      "learning_rate": 1.4326130297731294e-05,
      "loss": 2.229,
      "step": 480
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.08251953125,
      "learning_rate": 1.3640712266370778e-05,
      "loss": 2.2365,
      "step": 485
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.08349609375,
      "learning_rate": 1.2967582255870662e-05,
      "loss": 2.2276,
      "step": 490
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.08203125,
      "learning_rate": 1.2307197017249163e-05,
      "loss": 2.245,
      "step": 495
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.08544921875,
      "learning_rate": 1.166000465358504e-05,
      "loss": 2.2327,
      "step": 500
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.08349609375,
      "learning_rate": 1.1026444315958248e-05,
      "loss": 2.2257,
      "step": 505
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.08642578125,
      "learning_rate": 1.0406945905464832e-05,
      "loss": 2.2355,
      "step": 510
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.08349609375,
      "learning_rate": 9.801929781508377e-06,
      "loss": 2.2366,
      "step": 515
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.0849609375,
      "learning_rate": 9.211806476565995e-06,
      "loss": 2.2307,
      "step": 520
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.0869140625,
      "learning_rate": 8.63697641762235e-06,
      "loss": 2.2298,
      "step": 525
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.0830078125,
      "learning_rate": 8.077829654460684e-06,
      "loss": 2.24,
      "step": 530
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.0869140625,
      "learning_rate": 7.534745594995376e-06,
      "loss": 2.2349,
      "step": 535
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.08544921875,
      "learning_rate": 7.00809274782543e-06,
      "loss": 2.2522,
      "step": 540
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.087890625,
      "learning_rate": 6.498228472183709e-06,
      "loss": 2.2253,
      "step": 545
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.08544921875,
      "learning_rate": 6.00549873545155e-06,
      "loss": 2.2319,
      "step": 550
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.08544921875,
      "learning_rate": 5.530237878403297e-06,
      "loss": 2.23,
      "step": 555
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.08203125,
      "learning_rate": 5.0727683883399965e-06,
      "loss": 2.2366,
      "step": 560
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.08642578125,
      "learning_rate": 4.63340068026638e-06,
      "loss": 2.238,
      "step": 565
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.087890625,
      "learning_rate": 4.212432886259269e-06,
      "loss": 2.2238,
      "step": 570
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.0859375,
      "learning_rate": 3.8101506531707373e-06,
      "loss": 2.2308,
      "step": 575
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.0859375,
      "learning_rate": 3.426826948802979e-06,
      "loss": 2.2305,
      "step": 580
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.0849609375,
      "learning_rate": 3.0627218766865635e-06,
      "loss": 2.2364,
      "step": 585
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.08642578125,
      "learning_rate": 2.7180824995877015e-06,
      "loss": 2.2435,
      "step": 590
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.08642578125,
      "learning_rate": 2.3931426718643005e-06,
      "loss": 2.2347,
      "step": 595
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.08251953125,
      "learning_rate": 2.0881228807845587e-06,
      "loss": 2.2589,
      "step": 600
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.0849609375,
      "learning_rate": 1.8032300969157738e-06,
      "loss": 2.2323,
      "step": 605
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.08642578125,
      "learning_rate": 1.5386576336848828e-06,
      "loss": 2.2285,
      "step": 610
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.0849609375,
      "learning_rate": 1.2945850162060271e-06,
      "loss": 2.2368,
      "step": 615
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.08251953125,
      "learning_rate": 1.0711778594641267e-06,
      "loss": 2.2417,
      "step": 620
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.08642578125,
      "learning_rate": 8.685877559371869e-07,
      "loss": 2.2355,
      "step": 625
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.08544921875,
      "learning_rate": 6.869521727335145e-07,
      "loss": 2.2285,
      "step": 630
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.08447265625,
      "learning_rate": 5.26394358313689e-07,
      "loss": 2.2262,
      "step": 635
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.08154296875,
      "learning_rate": 3.8702325886057816e-07,
      "loss": 2.2289,
      "step": 640
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.08642578125,
      "learning_rate": 2.6893344435409695e-07,
      "loss": 2.2428,
      "step": 645
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.08740234375,
      "learning_rate": 1.722050444009702e-07,
      "loss": 2.2248,
      "step": 650
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.08349609375,
      "learning_rate": 9.690369386293054e-08,
      "loss": 2.2297,
      "step": 655
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.0849609375,
      "learning_rate": 4.308048832030403e-08,
      "loss": 2.2344,
      "step": 660
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.0859375,
      "learning_rate": 1.0771949401241265e-08,
      "loss": 2.2296,
      "step": 665
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.087890625,
      "learning_rate": 0.0,
      "loss": 2.2321,
      "step": 670
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.2356441020965576,
      "eval_runtime": 186.994,
      "eval_samples_per_second": 25.787,
      "eval_steps_per_second": 3.225,
      "step": 670
    },
    {
      "epoch": 1.0,
      "step": 670,
      "total_flos": 3.038889647317975e+17,
      "train_loss": 2.275535515884855,
      "train_runtime": 10859.5951,
      "train_samples_per_second": 7.898,
      "train_steps_per_second": 0.062
    }
  ],
  "logging_steps": 5,
  "max_steps": 670,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 3.038889647317975e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}