{
  "best_metric": 1.3314919471740723,
  "best_model_checkpoint": "./results/models/mistral-chem/checkpoint-384480",
  "epoch": 11.0,
  "eval_steps": 500,
  "global_step": 422928,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.013004577611319184, "grad_norm": 1.2890625, "learning_rate": 0.001999479816895547, "loss": 2.4533, "step": 500},
    {"epoch": 0.02600915522263837, "grad_norm": 0.95703125, "learning_rate": 0.0019989596337910945, "loss": 2.116, "step": 1000},
    {"epoch": 0.03901373283395755, "grad_norm": 1.6171875, "learning_rate": 0.0019984394506866415, "loss": 2.0364, "step": 1500},
    {"epoch": 0.05201831044527674, "grad_norm": 1.6796875, "learning_rate": 0.001997919267582189, "loss": 1.9539, "step": 2000},
    {"epoch": 0.06502288805659592, "grad_norm": 0.421875, "learning_rate": 0.001997399084477736, "loss": 1.9176, "step": 2500},
    {"epoch": 0.0780274656679151, "grad_norm": 0.75390625, "learning_rate": 0.0019968789013732834, "loss": 1.8701, "step": 3000},
    {"epoch": 0.0910320432792343, "grad_norm": 0.56640625, "learning_rate": 0.001996358718268831, "loss": 1.8383, "step": 3500},
    {"epoch": 0.10403662089055347, "grad_norm": 0.515625, "learning_rate": 0.001995838535164378, "loss": 1.8133, "step": 4000},
    {"epoch": 0.11704119850187265, "grad_norm": 1.2578125, "learning_rate": 0.0019953183520599253, "loss": 1.7976, "step": 4500},
    {"epoch": 0.13004577611319185, "grad_norm": 0.53515625, "learning_rate": 0.0019947981689554723, "loss": 1.7758, "step": 5000},
    {"epoch": 0.14305035372451103, "grad_norm": 0.451171875, "learning_rate": 0.0019942779858510198, "loss": 1.7529, "step": 5500},
    {"epoch": 0.1560549313358302, "grad_norm": 18.0, "learning_rate": 0.001993757802746567, "loss": 1.7465, "step": 6000},
    {"epoch": 0.16905950894714938, "grad_norm": 0.51953125, "learning_rate": 0.0019932376196421142, "loss": 1.7295, "step": 6500},
    {"epoch": 0.1820640865584686, "grad_norm": 0.474609375, "learning_rate": 0.0019927174365376612, "loss": 1.7239, "step": 7000},
    {"epoch": 0.19506866416978777, "grad_norm": 0.7734375, "learning_rate": 0.0019921972534332083, "loss": 1.7067, "step": 7500},
    {"epoch": 0.20807324178110695, "grad_norm": 0.7421875, "learning_rate": 0.0019916770703287557, "loss": 1.6868, "step": 8000},
    {"epoch": 0.22107781939242613, "grad_norm": 0.486328125, "learning_rate": 0.001991156887224303, "loss": 1.6772, "step": 8500},
    {"epoch": 0.2340823970037453, "grad_norm": 1.4375, "learning_rate": 0.00199063670411985, "loss": 1.6766, "step": 9000},
    {"epoch": 0.24708697461506451, "grad_norm": 1.09375, "learning_rate": 0.0019901165210153976, "loss": 1.6677, "step": 9500},
    {"epoch": 0.2600915522263837, "grad_norm": 0.72265625, "learning_rate": 0.0019895963379109446, "loss": 1.6503, "step": 10000},
    {"epoch": 0.2730961298377029, "grad_norm": 0.578125, "learning_rate": 0.001989076154806492, "loss": 1.6424, "step": 10500},
    {"epoch": 0.28610070744902205, "grad_norm": 0.5390625, "learning_rate": 0.001988555971702039, "loss": 1.6428, "step": 11000},
    {"epoch": 0.29910528506034123, "grad_norm": 0.78125, "learning_rate": 0.0019880357885975865, "loss": 1.6369, "step": 11500},
    {"epoch": 0.3121098626716604, "grad_norm": 2.03125, "learning_rate": 0.0019875156054931335, "loss": 1.6402, "step": 12000},
    {"epoch": 0.3251144402829796, "grad_norm": 0.56640625, "learning_rate": 0.001986995422388681, "loss": 1.6318, "step": 12500},
    {"epoch": 0.33811901789429877, "grad_norm": 0.46875, "learning_rate": 0.001986475239284228, "loss": 1.62, "step": 13000},
    {"epoch": 0.351123595505618, "grad_norm": 0.57421875, "learning_rate": 0.0019859550561797754, "loss": 1.6149, "step": 13500},
    {"epoch": 0.3641281731169372, "grad_norm": 0.5703125, "learning_rate": 0.0019854348730753224, "loss": 1.612, "step": 14000},
    {"epoch": 0.37713275072825636, "grad_norm": 0.408203125, "learning_rate": 0.00198491468997087, "loss": 1.5996, "step": 14500},
    {"epoch": 0.39013732833957554, "grad_norm": 0.453125, "learning_rate": 0.001984394506866417, "loss": 1.5997, "step": 15000},
    {"epoch": 0.4031419059508947, "grad_norm": 0.6640625, "learning_rate": 0.0019838743237619643, "loss": 1.5949, "step": 15500},
    {"epoch": 0.4161464835622139, "grad_norm": 0.447265625, "learning_rate": 0.0019833541406575114, "loss": 1.5923, "step": 16000},
    {"epoch": 0.4291510611735331, "grad_norm": 2.59375, "learning_rate": 0.001982833957553059, "loss": 1.596, "step": 16500},
    {"epoch": 0.44215563878485226, "grad_norm": 0.427734375, "learning_rate": 0.001982313774448606, "loss": 1.5886, "step": 17000},
    {"epoch": 0.45516021639617144, "grad_norm": 0.75, "learning_rate": 0.0019817935913441533, "loss": 1.5848, "step": 17500},
    {"epoch": 0.4681647940074906, "grad_norm": 0.609375, "learning_rate": 0.0019812734082397003, "loss": 1.5799, "step": 18000},
    {"epoch": 0.4811693716188098, "grad_norm": 0.359375, "learning_rate": 0.0019807532251352477, "loss": 1.5777, "step": 18500},
    {"epoch": 0.49417394923012903, "grad_norm": 0.8203125, "learning_rate": 0.0019802330420307947, "loss": 1.5786, "step": 19000},
    {"epoch": 0.5071785268414482, "grad_norm": 0.62890625, "learning_rate": 0.001979712858926342, "loss": 1.5661, "step": 19500},
    {"epoch": 0.5201831044527674, "grad_norm": 0.76953125, "learning_rate": 0.0019791926758218896, "loss": 1.5727, "step": 20000},
    {"epoch": 0.5331876820640865, "grad_norm": 0.404296875, "learning_rate": 0.0019786724927174366, "loss": 1.5709, "step": 20500},
    {"epoch": 0.5461922596754057, "grad_norm": 0.400390625, "learning_rate": 0.001978152309612984, "loss": 1.5684, "step": 21000},
    {"epoch": 0.559196837286725, "grad_norm": 0.5390625, "learning_rate": 0.001977632126508531, "loss": 1.5595, "step": 21500},
    {"epoch": 0.5722014148980441, "grad_norm": 0.5234375, "learning_rate": 0.001977111943404078, "loss": 1.5566, "step": 22000},
    {"epoch": 0.5852059925093633, "grad_norm": 0.455078125, "learning_rate": 0.0019765917602996255, "loss": 1.5531, "step": 22500},
    {"epoch": 0.5982105701206825, "grad_norm": 0.41015625, "learning_rate": 0.0019760715771951726, "loss": 1.5465, "step": 23000},
    {"epoch": 0.6112151477320017, "grad_norm": 0.609375, "learning_rate": 0.00197555139409072, "loss": 1.5437, "step": 23500},
    {"epoch": 0.6242197253433208, "grad_norm": 0.37109375, "learning_rate": 0.001975031210986267, "loss": 1.539, "step": 24000},
    {"epoch": 0.63722430295464, "grad_norm": 0.37890625, "learning_rate": 0.0019745110278818145, "loss": 1.5365, "step": 24500},
    {"epoch": 0.6502288805659592, "grad_norm": 0.6171875, "learning_rate": 0.0019739908447773615, "loss": 1.5384, "step": 25000},
    {"epoch": 0.6632334581772784, "grad_norm": 0.38671875, "learning_rate": 0.001973470661672909, "loss": 1.5352, "step": 25500},
    {"epoch": 0.6762380357885975, "grad_norm": 0.453125, "learning_rate": 0.0019729504785684564, "loss": 1.5328, "step": 26000},
    {"epoch": 0.6892426133999168, "grad_norm": 0.7890625, "learning_rate": 0.0019724302954640034, "loss": 1.5405, "step": 26500},
    {"epoch": 0.702247191011236, "grad_norm": 0.62890625, "learning_rate": 0.001971910112359551, "loss": 1.5336, "step": 27000},
    {"epoch": 0.7152517686225551, "grad_norm": 0.435546875, "learning_rate": 0.001971389929255098, "loss": 1.5275, "step": 27500},
    {"epoch": 0.7282563462338744, "grad_norm": 0.419921875, "learning_rate": 0.0019708697461506453, "loss": 1.5269, "step": 28000},
    {"epoch": 0.7412609238451935, "grad_norm": 0.365234375, "learning_rate": 0.0019703495630461923, "loss": 1.5259, "step": 28500},
    {"epoch": 0.7542655014565127, "grad_norm": 0.486328125, "learning_rate": 0.0019698293799417393, "loss": 1.526, "step": 29000},
    {"epoch": 0.7672700790678318, "grad_norm": 0.388671875, "learning_rate": 0.0019693091968372867, "loss": 1.5232, "step": 29500},
    {"epoch": 0.7802746566791511, "grad_norm": 0.412109375, "learning_rate": 0.0019687890137328337, "loss": 1.515, "step": 30000},
    {"epoch": 0.7932792342904702, "grad_norm": 2.78125, "learning_rate": 0.001968268830628381, "loss": 1.5135, "step": 30500},
    {"epoch": 0.8062838119017894, "grad_norm": 0.546875, "learning_rate": 0.0019677486475239286, "loss": 1.5105, "step": 31000},
    {"epoch": 0.8192883895131086, "grad_norm": 0.384765625, "learning_rate": 0.0019672284644194757, "loss": 1.5049, "step": 31500},
    {"epoch": 0.8322929671244278, "grad_norm": 0.353515625, "learning_rate": 0.001966708281315023, "loss": 1.5068, "step": 32000},
    {"epoch": 0.845297544735747, "grad_norm": 0.33203125, "learning_rate": 0.00196618809821057, "loss": 1.5106, "step": 32500},
    {"epoch": 0.8583021223470662, "grad_norm": 0.46484375, "learning_rate": 0.0019656679151061176, "loss": 1.5025, "step": 33000},
    {"epoch": 0.8713066999583854, "grad_norm": 0.357421875, "learning_rate": 0.0019651477320016646, "loss": 1.5003, "step": 33500},
    {"epoch": 0.8843112775697045, "grad_norm": 0.51953125, "learning_rate": 0.001964627548897212, "loss": 1.5008, "step": 34000},
    {"epoch": 0.8973158551810237, "grad_norm": 0.43359375, "learning_rate": 0.001964107365792759, "loss": 1.4981, "step": 34500},
    {"epoch": 0.9103204327923429, "grad_norm": 0.345703125, "learning_rate": 0.0019635871826883065, "loss": 1.5051, "step": 35000},
    {"epoch": 0.9233250104036621, "grad_norm": 0.451171875, "learning_rate": 0.0019630669995838535, "loss": 1.497, "step": 35500},
    {"epoch": 0.9363295880149812, "grad_norm": 0.30859375, "learning_rate": 0.0019625468164794005, "loss": 1.4988, "step": 36000},
    {"epoch": 0.9493341656263005, "grad_norm": 0.3984375, "learning_rate": 0.001962026633374948, "loss": 1.4969, "step": 36500},
    {"epoch": 0.9623387432376196, "grad_norm": 0.49609375, "learning_rate": 0.0019615064502704954, "loss": 1.4962, "step": 37000},
    {"epoch": 0.9753433208489388, "grad_norm": 0.369140625, "learning_rate": 0.0019609862671660424, "loss": 1.4917, "step": 37500},
    {"epoch": 0.9883478984602581, "grad_norm": 0.376953125, "learning_rate": 0.00196046608406159, "loss": 1.4911, "step": 38000},
    {"epoch": 1.0, "eval_loss": 1.4885175228118896, "eval_runtime": 0.5947, "eval_samples_per_second": 1681.496, "eval_steps_per_second": 6.726, "step": 38448},
    {"epoch": 1.0013524760715773, "grad_norm": 0.546875, "learning_rate": 0.001959945900957137, "loss": 1.4874, "step": 38500},
    {"epoch": 1.0143570536828963, "grad_norm": 4.15625, "learning_rate": 0.0019594257178526843, "loss": 1.4894, "step": 39000},
    {"epoch": 1.0273616312942155, "grad_norm": 0.390625, "learning_rate": 0.0019589055347482313, "loss": 1.4833, "step": 39500},
    {"epoch": 1.0403662089055348, "grad_norm": 0.40625, "learning_rate": 0.0019583853516437788, "loss": 1.4811, "step": 40000},
    {"epoch": 1.053370786516854, "grad_norm": 0.40234375, "learning_rate": 0.0019578651685393258, "loss": 1.4804, "step": 40500},
    {"epoch": 1.066375364128173, "grad_norm": 1.515625, "learning_rate": 0.001957344985434873, "loss": 1.4834, "step": 41000},
    {"epoch": 1.0793799417394923, "grad_norm": 0.345703125, "learning_rate": 0.0019568248023304202, "loss": 1.4792, "step": 41500},
    {"epoch": 1.0923845193508115, "grad_norm": 0.455078125, "learning_rate": 0.0019563046192259677, "loss": 1.4794, "step": 42000},
    {"epoch": 1.1053890969621307, "grad_norm": 0.435546875, "learning_rate": 0.001955784436121515, "loss": 1.4749, "step": 42500},
    {"epoch": 1.1183936745734497, "grad_norm": 0.6953125, "learning_rate": 0.001955264253017062, "loss": 1.4761, "step": 43000},
    {"epoch": 1.131398252184769, "grad_norm": 0.8359375, "learning_rate": 0.001954744069912609, "loss": 1.4754, "step": 43500},
    {"epoch": 1.1444028297960882, "grad_norm": 0.44921875, "learning_rate": 0.0019542238868081566, "loss": 1.4757, "step": 44000},
    {"epoch": 1.1574074074074074, "grad_norm": 0.5078125, "learning_rate": 0.0019537037037037036, "loss": 1.4703, "step": 44500},
    {"epoch": 1.1704119850187267, "grad_norm": 0.546875, "learning_rate": 0.001953183520599251, "loss": 1.472, "step": 45000},
    {"epoch": 1.1834165626300457, "grad_norm": 0.4453125, "learning_rate": 0.0019526633374947983, "loss": 1.4684, "step": 45500},
    {"epoch": 1.196421140241365, "grad_norm": 0.49609375, "learning_rate": 0.0019521431543903455, "loss": 1.467, "step": 46000},
    {"epoch": 1.2094257178526842, "grad_norm": 0.6640625, "learning_rate": 0.0019516229712858925, "loss": 1.4632, "step": 46500},
    {"epoch": 1.2224302954640034, "grad_norm": 0.609375, "learning_rate": 0.0019511027881814397, "loss": 1.4652, "step": 47000},
    {"epoch": 1.2354348730753224, "grad_norm": 0.376953125, "learning_rate": 0.001950582605076987, "loss": 1.4627, "step": 47500},
    {"epoch": 1.2484394506866416, "grad_norm": 0.400390625, "learning_rate": 0.0019500624219725344, "loss": 1.463, "step": 48000},
    {"epoch": 1.2614440282979609, "grad_norm": 0.71875, "learning_rate": 0.0019495422388680816, "loss": 1.4594, "step": 48500},
    {"epoch": 1.27444860590928, "grad_norm": 0.443359375, "learning_rate": 0.0019490220557636289, "loss": 1.4627, "step": 49000},
    {"epoch": 1.2874531835205993, "grad_norm": 0.412109375, "learning_rate": 0.001948501872659176, "loss": 1.4643, "step": 49500},
    {"epoch": 1.3004577611319184, "grad_norm": 0.578125, "learning_rate": 0.0019479816895547233, "loss": 1.4599, "step": 50000},
    {"epoch": 1.3134623387432376, "grad_norm": 0.546875, "learning_rate": 0.0019474615064502706, "loss": 1.4617, "step": 50500},
    {"epoch": 1.3264669163545568, "grad_norm": 0.9296875, "learning_rate": 0.0019469413233458178, "loss": 1.4599, "step": 51000},
    {"epoch": 1.339471493965876, "grad_norm": 0.400390625, "learning_rate": 0.001946421140241365, "loss": 1.4571, "step": 51500},
    {"epoch": 1.352476071577195, "grad_norm": 0.474609375, "learning_rate": 0.0019459009571369122, "loss": 1.4584, "step": 52000},
    {"epoch": 1.3654806491885143, "grad_norm": 0.482421875, "learning_rate": 0.0019453807740324595, "loss": 1.4572, "step": 52500},
    {"epoch": 1.3784852267998335, "grad_norm": 0.40625, "learning_rate": 0.0019448605909280067, "loss": 1.4542, "step": 53000},
    {"epoch": 1.3914898044111528, "grad_norm": 0.443359375, "learning_rate": 0.0019443404078235541, "loss": 1.4551, "step": 53500},
    {"epoch": 1.404494382022472, "grad_norm": 0.44140625, "learning_rate": 0.0019438202247191011, "loss": 1.4539, "step": 54000},
    {"epoch": 1.417498959633791, "grad_norm": 2.9375, "learning_rate": 0.0019433000416146484, "loss": 1.4542, "step": 54500},
    {"epoch": 1.4305035372451103, "grad_norm": 0.703125, "learning_rate": 0.0019427798585101956, "loss": 1.4509, "step": 55000},
    {"epoch": 1.4435081148564295, "grad_norm": 0.373046875, "learning_rate": 0.0019422596754057428, "loss": 1.4532, "step": 55500},
    {"epoch": 1.4565126924677487, "grad_norm": 0.33203125, "learning_rate": 0.00194173949230129, "loss": 1.4533, "step": 56000},
    {"epoch": 1.4695172700790677, "grad_norm": 0.3828125, "learning_rate": 0.0019412193091968373, "loss": 1.4516, "step": 56500},
    {"epoch": 1.482521847690387, "grad_norm": 0.6875, "learning_rate": 0.0019406991260923845, "loss": 1.4513, "step": 57000},
    {"epoch": 1.4955264253017062, "grad_norm": 0.357421875, "learning_rate": 0.0019401789429879317, "loss": 1.4496, "step": 57500},
    {"epoch": 1.5085310029130254, "grad_norm": 0.400390625, "learning_rate": 0.001939658759883479, "loss": 1.4431, "step": 58000},
    {"epoch": 1.5215355805243447, "grad_norm": 0.6875, "learning_rate": 0.0019391385767790262, "loss": 1.4508, "step": 58500},
    {"epoch": 1.534540158135664, "grad_norm": 0.6484375, "learning_rate": 0.0019386183936745737, "loss": 1.4533, "step": 59000},
    {"epoch": 1.547544735746983, "grad_norm": 0.345703125, "learning_rate": 0.0019380982105701209, "loss": 1.4468, "step": 59500},
    {"epoch": 1.5605493133583022, "grad_norm": 0.404296875, "learning_rate": 0.001937578027465668, "loss": 1.4448, "step": 60000},
    {"epoch": 1.5735538909696212, "grad_norm": 0.443359375, "learning_rate": 0.0019370578443612153, "loss": 1.4444, "step": 60500},
    {"epoch": 1.5865584685809404, "grad_norm": 0.375, "learning_rate": 0.0019365376612567623, "loss": 1.4446, "step": 61000},
    {"epoch": 1.5995630461922596, "grad_norm": 1.375, "learning_rate": 0.0019360174781523096, "loss": 1.4445, "step": 61500},
    {"epoch": 1.6125676238035789, "grad_norm": 0.47265625, "learning_rate": 0.0019354972950478568, "loss": 1.4412, "step": 62000},
    {"epoch": 1.625572201414898, "grad_norm": 0.408203125, "learning_rate": 0.001934977111943404, "loss": 1.443, "step": 62500},
    {"epoch": 1.6385767790262173, "grad_norm": 0.41015625, "learning_rate": 0.0019344569288389513, "loss": 1.4407, "step": 63000},
    {"epoch": 1.6515813566375364, "grad_norm": 0.455078125, "learning_rate": 0.0019339367457344985, "loss": 1.4373, "step": 63500},
    {"epoch": 1.6645859342488556, "grad_norm": 0.7578125, "learning_rate": 0.0019334165626300457, "loss": 1.434, "step": 64000},
    {"epoch": 1.6775905118601748, "grad_norm": 0.38671875, "learning_rate": 0.001932896379525593, "loss": 1.4394, "step": 64500},
    {"epoch": 1.6905950894714938, "grad_norm": 0.388671875, "learning_rate": 0.0019323761964211404, "loss": 1.4383, "step": 65000},
    {"epoch": 1.703599667082813, "grad_norm": 0.44140625, "learning_rate": 0.0019318560133166876, "loss": 1.4376, "step": 65500},
    {"epoch": 1.7166042446941323, "grad_norm": 0.41796875, "learning_rate": 0.0019313358302122348, "loss": 1.4382, "step": 66000},
    {"epoch": 1.7296088223054515, "grad_norm": 0.34765625, "learning_rate": 0.001930815647107782, "loss": 1.437, "step": 66500},
    {"epoch": 1.7426133999167708, "grad_norm": 0.3828125, "learning_rate": 0.0019302954640033293, "loss": 1.4395, "step": 67000},
    {"epoch": 1.75561797752809, "grad_norm": 0.421875, "learning_rate": 0.0019297752808988765, "loss": 1.4351, "step": 67500},
    {"epoch": 1.768622555139409, "grad_norm": 2.4375, "learning_rate": 0.0019292550977944235, "loss": 1.4419, "step": 68000},
    {"epoch": 1.7816271327507283, "grad_norm": 0.427734375, "learning_rate": 0.0019287349146899708, "loss": 1.4385, "step": 68500},
    {"epoch": 1.7946317103620475, "grad_norm": 1.359375, "learning_rate": 0.001928214731585518, "loss": 1.4342, "step": 69000},
    {"epoch": 1.8076362879733665, "grad_norm": 0.69140625, "learning_rate": 0.0019276945484810652, "loss": 1.4363, "step": 69500},
    {"epoch": 1.8206408655846857, "grad_norm": 0.447265625, "learning_rate": 0.0019271743653766125, "loss": 1.4361, "step": 70000},
    {"epoch": 1.833645443196005, "grad_norm": 0.453125, "learning_rate": 0.00192665418227216, "loss": 1.4333, "step": 70500},
    {"epoch": 1.8466500208073242, "grad_norm": 1.21875, "learning_rate": 0.0019261339991677071, "loss": 1.4342, "step": 71000},
    {"epoch": 1.8596545984186434, "grad_norm": 0.447265625, "learning_rate": 0.0019256138160632544, "loss": 1.4334, "step": 71500},
    {"epoch": 1.8726591760299627, "grad_norm": 0.4453125, "learning_rate": 0.0019250936329588016, "loss": 1.433, "step": 72000},
    {"epoch": 1.8856637536412817, "grad_norm": 0.640625, "learning_rate": 0.0019245734498543488, "loss": 1.4269, "step": 72500},
    {"epoch": 1.898668331252601, "grad_norm": 0.490234375, "learning_rate": 0.001924053266749896, "loss": 1.4313, "step": 73000},
    {"epoch": 1.91167290886392, "grad_norm": 0.4609375, "learning_rate": 0.0019235330836454433, "loss": 1.4354, "step": 73500},
    {"epoch": 1.9246774864752392, "grad_norm": 0.412109375, "learning_rate": 0.0019230129005409905, "loss": 1.4304, "step": 74000},
    {"epoch": 1.9376820640865584, "grad_norm": 0.94140625, "learning_rate": 0.0019224927174365377, "loss": 1.4238, "step": 74500},
    {"epoch": 1.9506866416978776, "grad_norm": 0.40625, "learning_rate": 0.001921972534332085, "loss": 1.4313, "step": 75000},
    {"epoch": 1.9636912193091969, "grad_norm": 0.369140625, "learning_rate": 0.001921452351227632, "loss": 1.4266, "step": 75500},
    {"epoch": 1.9766957969205161, "grad_norm": 0.44921875, "learning_rate": 0.0019209321681231794, "loss": 1.425, "step": 76000},
    {"epoch": 1.9897003745318353, "grad_norm": 2.75, "learning_rate": 0.0019204119850187266, "loss": 1.4248, "step": 76500},
    {"epoch": 2.0, "eval_loss": 1.4245445728302002, "eval_runtime": 0.5732, "eval_samples_per_second": 1744.482, "eval_steps_per_second": 6.978, "step": 76896},
    {"epoch": 2.0027049521431546, "grad_norm": 20.5, "learning_rate": 0.0019198918019142739, "loss": 1.424, "step": 77000},
    {"epoch": 2.0157095297544734, "grad_norm": 0.578125, "learning_rate": 0.001919371618809821, "loss": 1.4238, "step": 77500},
    {"epoch": 2.0287141073657926, "grad_norm": 0.404296875, "learning_rate": 0.0019188514357053683, "loss": 1.421, "step": 78000},
    {"epoch": 2.041718684977112, "grad_norm": 0.365234375, "learning_rate": 0.0019183312526009156, "loss": 1.4216, "step": 78500},
    {"epoch": 2.054723262588431, "grad_norm": 0.7890625, "learning_rate": 0.0019178110694964628, "loss": 1.4202, "step": 79000},
    {"epoch": 2.0677278401997503, "grad_norm": 0.330078125, "learning_rate": 0.00191729088639201, "loss": 1.4181, "step": 79500},
    {"epoch": 2.0807324178110695, "grad_norm": 0.328125, "learning_rate": 0.0019167707032875572, "loss": 1.4194, "step": 80000},
    {"epoch": 2.093736995422389, "grad_norm": 0.72265625, "learning_rate": 0.0019162505201831045, "loss": 1.4196, "step": 80500},
    {"epoch": 2.106741573033708, "grad_norm": 0.33984375, "learning_rate": 0.0019157303370786517, "loss": 1.4172, "step": 81000},
    {"epoch": 2.1197461506450272, "grad_norm": 0.345703125, "learning_rate": 0.0019152101539741991, "loss": 1.4181, "step": 81500},
    {"epoch": 2.132750728256346, "grad_norm": 0.353515625, "learning_rate": 0.0019146899708697464, "loss": 1.4152, "step": 82000},
    {"epoch": 2.1457553058676653, "grad_norm": 0.431640625, "learning_rate": 0.0019141697877652934, "loss": 1.4141, "step": 82500},
    {"epoch": 2.1587598834789845, "grad_norm": 0.345703125, "learning_rate": 0.0019136496046608406, "loss": 1.417, "step": 83000},
    {"epoch": 2.1717644610903037, "grad_norm": 6.96875, "learning_rate": 0.0019131294215563878, "loss": 1.4131, "step": 83500},
    {"epoch": 2.184769038701623, "grad_norm": 0.44140625, "learning_rate": 0.001912609238451935, "loss": 1.4168, "step": 84000},
    {"epoch": 2.197773616312942, "grad_norm": 0.44140625, "learning_rate": 0.0019120890553474823, "loss": 1.4158, "step": 84500},
    {"epoch": 2.2107781939242614, "grad_norm": 0.43359375, "learning_rate": 0.0019115688722430295, "loss": 1.4125, "step": 85000},
    {"epoch": 2.2237827715355807, "grad_norm": 1.5390625, "learning_rate": 0.0019110486891385768, "loss": 1.4114, "step": 85500},
    {"epoch": 2.2367873491468995, "grad_norm": 0.69140625, "learning_rate": 0.001910528506034124, "loss": 1.4156, "step": 86000},
    {"epoch": 2.2497919267582187, "grad_norm": 0.42578125, "learning_rate": 0.0019100083229296712, "loss": 1.4146, "step": 86500},
    {"epoch": 2.262796504369538, "grad_norm": 0.392578125, "learning_rate": 0.0019094881398252184, "loss": 1.4142, "step": 87000},
    {"epoch": 2.275801081980857, "grad_norm": 0.421875, "learning_rate": 0.0019089679567207659, "loss": 1.4153, "step": 87500},
    {"epoch": 2.2888056595921764, "grad_norm": 0.376953125, "learning_rate": 0.0019084477736163131, "loss": 1.4189, "step": 88000},
    {"epoch": 2.3018102372034956, "grad_norm": 4.875, "learning_rate": 0.0019079275905118603, "loss": 1.4152, "step": 88500},
    {"epoch": 2.314814814814815, "grad_norm": 0.94921875, "learning_rate": 0.0019074074074074076, "loss": 1.4148, "step": 89000},
    {"epoch": 2.327819392426134, "grad_norm": 0.435546875, "learning_rate": 0.0019068872243029546, "loss": 1.4115, "step": 89500},
    {"epoch": 2.3408239700374533, "grad_norm": 0.69140625, "learning_rate": 0.0019063670411985018, "loss": 1.4131, "step": 90000},
    {"epoch": 2.3538285476487726, "grad_norm": 0.515625, "learning_rate": 0.001905846858094049, "loss": 1.4085, "step": 90500},
    {"epoch": 2.3668331252600914, "grad_norm": 0.52734375, "learning_rate": 0.0019053266749895963, "loss": 1.4096, "step": 91000},
    {"epoch": 2.3798377028714106, "grad_norm": 0.373046875, "learning_rate": 0.0019048064918851435, "loss": 1.4128, "step": 91500},
    {"epoch": 2.39284228048273, "grad_norm": 0.451171875, "learning_rate": 0.0019042863087806907, "loss": 1.4102, "step": 92000},
    {"epoch": 2.405846858094049, "grad_norm": 0.57421875, "learning_rate": 0.001903766125676238, "loss": 1.405, "step": 92500},
    {"epoch": 2.4188514357053683, "grad_norm": 0.59765625, "learning_rate": 0.0019032459425717854, "loss": 1.4061, "step": 93000},
    {"epoch": 2.4318560133166875, "grad_norm": 1.1953125, "learning_rate": 0.0019027257594673326, "loss": 1.4051, "step": 93500},
    {"epoch": 2.444860590928007, "grad_norm": 0.400390625, "learning_rate": 0.0019022055763628799, "loss": 1.4044, "step": 94000},
    {"epoch": 2.457865168539326, "grad_norm": 0.54296875, "learning_rate": 0.001901685393258427, "loss": 1.403, "step": 94500},
    {"epoch": 2.470869746150645, "grad_norm": 0.6015625, "learning_rate": 0.0019011652101539743, "loss": 1.4064, "step": 95000},
    {"epoch": 2.483874323761964, "grad_norm": 0.47265625, "learning_rate": 0.0019006450270495215, "loss": 1.4027, "step": 95500},
    {"epoch": 2.4968789013732833, "grad_norm": 0.47265625, "learning_rate": 0.0019001248439450688, "loss": 1.4034, "step": 96000},
    {"epoch": 2.5098834789846025, "grad_norm": 0.42578125, "learning_rate": 0.001899604660840616, "loss": 1.4037, "step": 96500},
    {"epoch": 2.5228880565959217, "grad_norm": 0.37109375, "learning_rate": 0.001899084477736163, "loss": 1.4087, "step": 97000},
    {"epoch": 2.535892634207241, "grad_norm": 0.953125, "learning_rate": 0.0018985642946317102, "loss": 1.4119, "step": 97500},
    {"epoch": 2.54889721181856, "grad_norm": 0.484375, "learning_rate": 0.0018980441115272575, "loss": 1.409, "step": 98000},
    {"epoch": 2.5619017894298795, "grad_norm": 0.494140625, "learning_rate": 0.001897523928422805, "loss": 1.4066, "step": 98500},
    {"epoch": 2.5749063670411987, "grad_norm": 0.421875, "learning_rate": 0.0018970037453183521, "loss": 1.4026, "step": 99000},
    {"epoch": 2.587910944652518, "grad_norm": 0.7421875, "learning_rate": 0.0018964835622138994, "loss": 1.4052, "step": 99500},
    {"epoch": 2.6009155222638367, "grad_norm": 0.65234375, "learning_rate": 0.0018959633791094466, "loss": 1.4065, "step": 100000},
    {"epoch": 2.613920099875156, "grad_norm": 0.52734375, "learning_rate": 0.0018954431960049938, "loss": 1.4009, "step": 100500},
    {"epoch": 2.626924677486475, "grad_norm": 0.7421875, "learning_rate": 0.001894923012900541, "loss": 1.4077, "step": 101000},
    {"epoch": 2.6399292550977944, "grad_norm": 0.333984375, "learning_rate": 0.0018944028297960883, "loss": 1.4046, "step": 101500},
    {"epoch": 2.6529338327091136, "grad_norm": 0.5234375, "learning_rate": 0.0018938826466916355, "loss": 1.4058, "step": 102000},
    {"epoch": 2.665938410320433, "grad_norm": 0.80859375, "learning_rate": 0.0018933624635871827, "loss": 1.4028, "step": 102500},
    {"epoch": 2.678942987931752, "grad_norm": 0.40234375, "learning_rate": 0.00189284228048273, "loss": 1.3995, "step": 103000},
    {"epoch": 2.691947565543071, "grad_norm": 0.671875, "learning_rate": 0.0018923220973782772, "loss": 1.4054, "step": 103500},
    {"epoch": 2.70495214315439, "grad_norm": 0.369140625, "learning_rate": 0.0018918019142738244, "loss": 1.407, "step": 104000},
    {"epoch": 2.7179567207657094, "grad_norm": 0.396484375, "learning_rate": 0.0018912817311693717, "loss": 1.4034, "step": 104500},
    {"epoch": 2.7309612983770286, "grad_norm": 0.412109375, "learning_rate": 0.0018907615480649189, "loss": 1.4037, "step": 105000},
    {"epoch": 2.743965875988348, "grad_norm": 0.443359375, "learning_rate": 0.0018902413649604661, "loss": 1.4019, "step": 105500},
    {"epoch": 2.756970453599667, "grad_norm": 0.38671875, "learning_rate": 0.0018897211818560133, "loss": 1.3987, "step": 106000},
    {"epoch": 2.7699750312109863, "grad_norm": 0.78515625, "learning_rate": 0.0018892009987515606, "loss": 1.4019, "step": 106500},
    {"epoch": 2.7829796088223056, "grad_norm": 1.3984375, "learning_rate": 0.0018886808156471078, "loss": 1.4029, "step": 107000},
    {"epoch": 2.795984186433625, "grad_norm": 0.609375, "learning_rate": 0.001888160632542655, "loss": 1.4018, "step": 107500},
    {"epoch": 2.808988764044944, "grad_norm": 0.5859375, "learning_rate": 0.0018876404494382023, "loss": 1.3986, "step": 108000},
    {"epoch": 2.8219933416562633, "grad_norm": 7.65625, "learning_rate": 0.0018871202663337495, "loss": 1.4031, "step": 108500},
    {"epoch": 2.834997919267582, "grad_norm": 0.447265625, "learning_rate": 0.0018866000832292967, "loss": 1.4046, "step": 109000},
    {"epoch": 2.8480024968789013, "grad_norm": 0.412109375, "learning_rate": 0.0018860799001248442, "loss": 1.4029, "step": 109500},
    {"epoch": 2.8610070744902205, "grad_norm": 0.4609375, "learning_rate": 0.0018855597170203914, "loss": 1.399, "step": 110000},
    {"epoch": 2.8740116521015397, "grad_norm": 0.361328125, "learning_rate": 0.0018850395339159386, "loss": 1.4, "step": 110500},
    {"epoch": 2.887016229712859, "grad_norm": 0.47265625, "learning_rate": 0.0018845193508114856, "loss": 1.3969, "step": 111000},
    {"epoch": 2.900020807324178, "grad_norm": 1.2578125, "learning_rate": 0.0018839991677070329, "loss": 1.3984, "step": 111500},
    {"epoch": 2.9130253849354975, "grad_norm": 0.427734375, "learning_rate": 0.00188347898460258, "loss": 1.4017, "step": 112000},
    {"epoch": 2.9260299625468162, "grad_norm": 0.7421875, "learning_rate": 0.0018829588014981273, "loss": 1.3957, "step": 112500},
    {"epoch": 2.9390345401581355, "grad_norm": 0.37890625, "learning_rate": 0.0018824386183936745, "loss": 1.3974, "step": 113000},
    {"epoch": 2.9520391177694547, "grad_norm": 0.486328125, "learning_rate": 0.0018819184352892218, "loss": 1.3972, "step": 113500},
    {"epoch": 2.965043695380774, "grad_norm": 0.37109375, "learning_rate": 0.001881398252184769, "loss": 1.3994, "step": 114000},
    {"epoch": 2.978048272992093, "grad_norm": 0.498046875, "learning_rate": 0.0018808780690803162, "loss": 1.3966, "step": 114500},
    {"epoch": 2.9910528506034124, "grad_norm": 0.4296875, "learning_rate": 0.0018803578859758635, "loss": 1.3966, "step": 115000},
    {"epoch": 3.0, "eval_loss": 1.3958898782730103, "eval_runtime": 0.5647, "eval_samples_per_second": 1770.912, "eval_steps_per_second": 7.084, "step": 115344},
    {"epoch": 3.0040574282147317, "grad_norm": 0.76171875, "learning_rate": 0.001879837702871411, "loss": 1.3932, "step": 115500},
    {"epoch": 3.017062005826051, "grad_norm": 0.431640625, "learning_rate": 0.0018793175197669581, "loss": 1.3914, "step": 116000},
    {"epoch": 3.03006658343737, "grad_norm": 0.56640625, "learning_rate": 0.0018787973366625054, "loss": 1.389, "step": 116500},
    {"epoch": 3.0430711610486894, "grad_norm": 0.55078125, "learning_rate": 0.0018782771535580526, "loss": 1.3895, "step": 117000},
    {"epoch": 3.056075738660008, "grad_norm": 0.50390625, "learning_rate": 0.0018777569704535998, "loss": 1.3904, "step": 117500},
    {"epoch": 3.0690803162713274, "grad_norm": 0.423828125, "learning_rate": 0.001877236787349147, "loss": 1.3905, "step": 118000},
    {"epoch": 3.0820848938826466, "grad_norm": 0.51953125, "learning_rate": 0.001876716604244694, "loss": 1.3899, "step": 118500},
    {"epoch": 3.095089471493966, "grad_norm": 1.2265625, "learning_rate": 0.0018761964211402413, "loss": 1.386, "step": 119000},
    {"epoch": 3.108094049105285, "grad_norm": 0.48046875, "learning_rate": 0.0018756762380357885, "loss": 1.3859, "step": 119500},
    {"epoch": 3.1210986267166043, "grad_norm": 0.3984375, "learning_rate": 0.0018751560549313357, "loss": 1.3893, "step": 120000},
    {"epoch": 3.1341032043279236, "grad_norm": 0.46875, "learning_rate": 0.001874635871826883, "loss": 1.3883, "step": 120500},
    {"epoch": 3.147107781939243, "grad_norm": 0.359375, "learning_rate": 0.0018741156887224304, "loss": 1.3898, "step": 121000},
    {"epoch": 3.1601123595505616, "grad_norm": 0.76953125, "learning_rate": 0.0018735955056179776, "loss": 1.3894, "step": 121500},
    {"epoch": 3.173116937161881, "grad_norm": 0.46484375, "learning_rate": 0.0018730753225135249, "loss": 1.3903, "step": 122000},
    {"epoch": 3.1861215147732, "grad_norm": 0.41015625, "learning_rate": 0.001872555139409072, "loss": 1.3873, "step": 122500},
    {"epoch": 3.1991260923845193, "grad_norm": 0.365234375, "learning_rate": 0.0018720349563046193, "loss": 1.3857, "step": 123000},
    {"epoch": 3.2121306699958385, "grad_norm": 0.5234375, "learning_rate": 0.0018715147732001666, "loss": 1.3907, "step": 123500},
    {"epoch": 3.2251352476071578, "grad_norm": 0.380859375, "learning_rate": 0.0018709945900957138, "loss": 1.3858, "step": 124000},
    {"epoch": 3.238139825218477, "grad_norm": 0.458984375, "learning_rate": 0.001870474406991261, "loss": 1.3873, "step": 124500},
    {"epoch": 3.251144402829796, "grad_norm": 0.515625, "learning_rate": 0.0018699542238868082, "loss": 1.3846, "step": 125000},
    {"epoch": 3.2641489804411155, "grad_norm": 0.6640625, "learning_rate": 0.0018694340407823553, "loss": 1.3891, "step": 125500},
    {"epoch": 3.2771535580524347, "grad_norm": 7.5, "learning_rate": 0.0018689138576779025, "loss": 1.3882, "step": 126000},
    {"epoch": 3.2901581356637535, "grad_norm": 0.48828125, "learning_rate": 0.00186839367457345, "loss": 1.3879, "step": 126500},
    {"epoch": 3.3031627132750727, "grad_norm": 0.404296875, "learning_rate": 0.0018678734914689972, "loss": 1.3861, "step": 127000},
    {"epoch": 3.316167290886392, "grad_norm": 3.21875, "learning_rate": 0.0018673533083645444, "loss": 1.3863, "step": 127500},
    {"epoch": 3.329171868497711, "grad_norm": 0.40234375, "learning_rate": 0.0018668331252600916, "loss": 1.3832, "step": 128000},
    {"epoch": 3.3421764461090304, "grad_norm": 0.5, "learning_rate": 0.0018663129421556388, "loss": 1.3859, "step": 128500},
    {"epoch": 3.3551810237203497, "grad_norm": 0.48828125, "learning_rate": 0.001865792759051186, "loss": 1.3859, "step": 129000},
    {"epoch": 3.368185601331669, "grad_norm": 0.7734375, "learning_rate": 0.0018652725759467333, "loss": 1.3882, "step": 129500},
    {"epoch": 3.381190178942988, "grad_norm": 0.39453125, "learning_rate": 0.0018647523928422805, "loss": 1.3863, "step": 130000},
    {"epoch": 3.394194756554307, "grad_norm": 0.357421875, "learning_rate": 0.0018642322097378278, "loss": 1.3866, "step": 130500},
    {"epoch": 3.407199334165626, "grad_norm": 0.3828125, "learning_rate": 0.001863712026633375, "loss": 1.3863, "step": 131000},
    {"epoch": 3.4202039117769454, "grad_norm": 0.57421875, "learning_rate": 0.0018631918435289222, "loss": 1.3839, "step": 131500},
    {"epoch": 3.4332084893882646, "grad_norm": 0.9296875, "learning_rate": 0.0018626716604244697, "loss": 1.3842, "step": 132000},
    {"epoch": 3.446213066999584, "grad_norm": 0.482421875, "learning_rate": 0.0018621514773200167, "loss": 1.384, "step": 132500},
    {"epoch": 3.459217644610903, "grad_norm": 0.40234375, "learning_rate": 0.001861631294215564, "loss": 1.3835, "step": 133000},
    {"epoch": 3.4722222222222223, "grad_norm": 0.400390625, "learning_rate": 0.0018611111111111111, "loss": 1.3857, "step": 133500},
    {"epoch": 3.4852267998335416, "grad_norm": 0.416015625, "learning_rate": 0.0018605909280066584, "loss": 1.3822, "step": 134000},
    {"epoch": 3.498231377444861, "grad_norm": 0.373046875, "learning_rate": 0.0018600707449022056, "loss": 1.3798, "step": 134500},
    {"epoch": 3.51123595505618, "grad_norm": 0.39453125, "learning_rate": 0.0018595505617977528, "loss": 1.3806, "step": 135000},
    {"epoch": 3.524240532667499, "grad_norm": 0.50390625, "learning_rate": 0.0018590303786933, "loss": 1.3825, "step": 135500},
    {"epoch": 3.537245110278818, "grad_norm": 0.609375, "learning_rate": 0.0018585101955888473, "loss": 1.3822, "step": 136000},
    {"epoch": 3.5502496878901373, "grad_norm": 0.40625, "learning_rate": 0.0018579900124843945, "loss": 1.3837, "step": 136500},
    {"epoch": 3.5632542655014565, "grad_norm": 0.431640625, "learning_rate": 0.0018574698293799417, "loss": 1.3789, "step": 137000},
    {"epoch": 3.5762588431127758, "grad_norm": 0.373046875, "learning_rate": 0.001856949646275489, "loss": 1.3798, "step": 137500},
    {"epoch": 3.589263420724095, "grad_norm": 0.46875, "learning_rate": 0.0018564294631710364, "loss": 1.3812, "step": 138000},
    {"epoch": 3.6022679983354142, "grad_norm": 3.21875, "learning_rate": 0.0018559092800665836, "loss": 1.3765, "step": 138500},
    {"epoch": 3.615272575946733, "grad_norm": 0.37109375, "learning_rate": 0.0018553890969621309, "loss": 1.3759, "step": 139000},
    {"epoch": 3.6282771535580522, "grad_norm": 0.435546875, "learning_rate": 0.0018548689138576779, "loss": 1.382, "step": 139500},
    {"epoch": 3.6412817311693715, "grad_norm": 0.9921875, "learning_rate": 0.001854348730753225, "loss": 1.3796, "step": 140000},
    {"epoch": 3.6542863087806907, "grad_norm": 14.375, "learning_rate": 0.0018538285476487723, "loss": 1.3818, "step": 140500},
    {"epoch": 3.66729088639201, "grad_norm": 6.15625, "learning_rate": 0.0018533083645443196, "loss": 1.3825, "step": 141000},
    {"epoch": 3.680295464003329, "grad_norm": 0.310546875, "learning_rate": 0.0018527881814398668, "loss": 1.3821, "step": 141500},
    {"epoch": 3.6933000416146484, "grad_norm": 4.03125, "learning_rate": 0.001852267998335414, "loss": 1.3776, "step": 142000},
    {"epoch": 3.7063046192259677, "grad_norm": 0.380859375, "learning_rate": 0.0018517478152309612, "loss": 1.3791, "step": 142500},
    {"epoch": 3.719309196837287, "grad_norm": 0.73828125, "learning_rate": 0.0018512276321265085, "loss": 1.3764, "step": 143000},
    {"epoch": 3.732313774448606, "grad_norm": 0.48046875, "learning_rate": 0.001850707449022056, "loss": 1.3773, "step": 143500},
    {"epoch": 3.7453183520599254, "grad_norm": 0.435546875, "learning_rate": 0.0018501872659176031, "loss": 1.3803, "step": 144000},
    {"epoch": 3.758322929671244, "grad_norm": 0.400390625, "learning_rate": 0.0018496670828131504, "loss": 1.3776, "step": 144500},
    {"epoch": 3.7713275072825634, "grad_norm": 0.45703125, "learning_rate": 0.0018491468997086976, "loss": 1.3758, "step": 145000},
    {"epoch": 3.7843320848938826, "grad_norm": 0.5625, "learning_rate": 0.0018486267166042448, "loss": 1.3779, "step": 145500},
    {"epoch": 3.797336662505202, "grad_norm": 0.423828125, "learning_rate": 0.001848106533499792, "loss": 1.3788, "step": 146000},
    {"epoch": 3.810341240116521, "grad_norm": 0.50390625, "learning_rate": 0.0018475863503953393, "loss": 1.3766, "step": 146500},
    {"epoch": 3.8233458177278403, "grad_norm": 0.486328125, "learning_rate": 0.0018470661672908863, "loss": 1.3764, "step": 147000},
    {"epoch": 3.836350395339159, "grad_norm": 0.421875, "learning_rate": 0.0018465459841864335, "loss": 1.378, "step": 147500},
    {"epoch": 3.8493549729504783, "grad_norm": 0.34375, "learning_rate": 0.0018460258010819808, "loss": 1.3738, "step": 148000},
    {"epoch": 3.8623595505617976, "grad_norm": 0.482421875, "learning_rate": 0.001845505617977528, "loss": 1.3739, "step": 148500},
    {"epoch": 3.875364128173117, "grad_norm": 0.515625, "learning_rate": 0.0018449854348730754, "loss": 1.3743, "step": 149000},
    {"epoch": 3.888368705784436, "grad_norm": 0.435546875, "learning_rate": 0.0018444652517686227, "loss": 1.3736, "step": 149500},
    {"epoch": 3.9013732833957553, "grad_norm": 1.34375, "learning_rate": 0.0018439450686641699, "loss": 1.3727, "step": 150000},
    {"epoch": 3.9143778610070745, "grad_norm": 0.478515625, "learning_rate": 0.0018434248855597171, "loss": 1.3772, "step": 150500},
    {"epoch": 3.9273824386183938, "grad_norm": 1.171875, "learning_rate": 0.0018429047024552643, "loss": 1.3708, "step": 151000},
    {"epoch": 3.940387016229713, "grad_norm": 0.5078125, "learning_rate": 0.0018423845193508116, "loss": 1.3735, "step": 151500},
    {"epoch": 3.9533915938410322, "grad_norm": 0.875, "learning_rate": 0.0018418643362463588, "loss": 1.3735, "step": 152000},
    {"epoch": 3.9663961714523515, "grad_norm": 0.439453125, "learning_rate": 0.001841344153141906, "loss": 1.3741, "step": 152500},
    {"epoch": 3.9794007490636703, "grad_norm": 0.51171875, "learning_rate": 0.0018408239700374533, "loss": 1.3725, "step": 153000},
    {"epoch": 3.9924053266749895, "grad_norm": 1.4921875, "learning_rate": 0.0018403037869330005, "loss": 1.3764, "step": 153500},
    {"epoch": 4.0, "eval_loss": 1.380986213684082, "eval_runtime": 0.7298, "eval_samples_per_second": 1370.29, "eval_steps_per_second": 5.481, "step": 153792},
    {"epoch": 4.005409904286309, "grad_norm": 0.7890625, "learning_rate": 0.0018397836038285475, "loss": 1.3757, "step": 154000},
    {"epoch": 4.018414481897628, "grad_norm": 0.953125, "learning_rate": 0.001839263420724095, "loss": 1.3727, "step": 154500},
    {"epoch": 4.031419059508947, "grad_norm": 0.40234375, "learning_rate": 0.0018387432376196422, "loss": 1.3686, "step": 155000},
    {"epoch": 4.044423637120266, "grad_norm": 0.5078125, "learning_rate": 0.0018382230545151894, "loss": 1.3683, "step": 155500},
    {"epoch": 4.057428214731585, "grad_norm": 0.50390625, "learning_rate": 0.0018377028714107366, "loss": 1.371, "step": 156000},
    {"epoch": 4.0704327923429044, "grad_norm": 0.51953125, "learning_rate": 0.0018371826883062839, "loss": 1.3706, "step": 156500},
    {"epoch": 4.083437369954224, "grad_norm": 0.984375, "learning_rate": 0.001836662505201831, "loss": 1.3682, "step": 157000},
    {"epoch": 4.096441947565543, "grad_norm": 0.384765625, "learning_rate": 0.0018361423220973783, "loss": 1.3709, "step": 157500},
    {"epoch": 4.109446525176862, "grad_norm": 0.384765625, "learning_rate": 0.0018356221389929255, "loss": 1.3705, "step": 158000},
    {"epoch": 4.122451102788181, "grad_norm": 0.373046875, "learning_rate": 0.0018351019558884728, "loss": 1.3714, "step": 158500},
    {"epoch": 4.135455680399501, "grad_norm": 1.46875, "learning_rate": 0.00183458177278402, "loss": 1.3688, "step": 159000},
    {"epoch": 4.14846025801082, "grad_norm": 0.345703125, "learning_rate": 0.0018340615896795672, "loss": 1.3723, "step": 159500},
    {"epoch": 4.161464835622139, "grad_norm": 0.8125, "learning_rate": 0.0018335414065751145, "loss": 1.3697, "step": 160000},
    {"epoch": 4.174469413233458, "grad_norm": 0.3828125, "learning_rate": 0.001833021223470662, "loss": 1.3684, "step": 160500},
    {"epoch": 4.187473990844778, "grad_norm": 0.396484375, "learning_rate": 0.001832501040366209, "loss": 1.3704, "step": 161000},
    {"epoch": 4.200478568456097, "grad_norm": 0.400390625, "learning_rate": 0.0018319808572617561, "loss": 1.3705, "step": 161500},
    {"epoch": 4.213483146067416, "grad_norm": 1.0703125, "learning_rate": 0.0018314606741573034, "loss": 1.3744, "step": 162000},
    {"epoch": 4.226487723678735, "grad_norm": 0.40234375, "learning_rate": 0.0018309404910528506, "loss": 1.3663, "step": 162500},
    {"epoch": 4.2394923012900545, "grad_norm": 0.451171875, "learning_rate": 0.0018304203079483978, "loss": 1.3706, "step": 163000},
    {"epoch": 4.252496878901374, "grad_norm": 0.7890625, "learning_rate": 0.001829900124843945, "loss": 1.37, "step": 163500},
    {"epoch": 4.265501456512692, "grad_norm": 0.9140625, "learning_rate": 0.0018293799417394923, "loss": 1.3669, "step": 164000},
    {"epoch": 4.278506034124011, "grad_norm": 0.765625, "learning_rate": 0.0018288597586350395, "loss": 1.3674, "step": 164500},
    {"epoch": 4.2915106117353305, "grad_norm": 0.4609375, "learning_rate": 0.0018283395755305867, "loss": 1.3672, "step": 165000},
    {"epoch": 4.30451518934665, "grad_norm": 1.15625, "learning_rate": 0.001827819392426134, "loss": 1.37, "step": 165500},
    {"epoch": 4.317519766957969, "grad_norm": 1.765625, "learning_rate": 0.0018272992093216814, "loss": 1.3724, "step": 166000},
    {"epoch": 4.330524344569288, "grad_norm": 0.57421875, "learning_rate": 0.0018267790262172286, "loss": 1.3687, "step": 166500},
    {"epoch": 4.3435289221806075, "grad_norm": 0.384765625, "learning_rate": 0.0018262588431127759, "loss": 1.3676, "step": 167000},
    {"epoch": 4.356533499791927, "grad_norm": 0.4140625, "learning_rate": 0.001825738660008323, "loss": 1.3672, "step": 167500},
    {"epoch": 4.369538077403246, "grad_norm": 1.7890625, "learning_rate": 0.0018252184769038703, "loss": 1.3675, "step": 168000},
    {"epoch": 4.382542655014565, "grad_norm": 0.5234375, "learning_rate": 0.0018246982937994173, "loss": 1.3682, "step": 168500},
    {"epoch": 4.395547232625884, "grad_norm": 0.78515625, "learning_rate": 0.0018241781106949646, "loss": 1.3675, "step": 169000},
    {"epoch": 4.408551810237204, "grad_norm": 0.50390625, "learning_rate": 0.0018236579275905118, "loss": 1.3675, "step": 169500},
    {"epoch": 4.421556387848523, "grad_norm": 0.427734375, "learning_rate": 0.001823137744486059, "loss": 1.367, "step": 170000},
    {"epoch": 4.434560965459842, "grad_norm": 0.42578125, "learning_rate": 0.0018226175613816062, "loss": 1.3729, "step": 170500},
    {"epoch": 4.447565543071161, "grad_norm": 0.546875, "learning_rate": 0.0018220973782771535, "loss": 1.3654, "step": 171000},
    {"epoch": 4.460570120682481, "grad_norm": 0.625, "learning_rate": 0.001821577195172701, "loss": 1.3694, "step": 171500},
    {"epoch": 4.473574698293799, "grad_norm": 0.80078125, "learning_rate": 0.0018210570120682482, "loss": 1.3647, "step": 172000},
    {"epoch": 4.486579275905118, "grad_norm": 0.40625, "learning_rate": 0.0018205368289637954, "loss": 1.369, "step": 172500},
    {"epoch": 4.499583853516437, "grad_norm": 0.427734375, "learning_rate": 0.0018200166458593426, "loss": 1.3665, "step": 173000},
    {"epoch": 4.512588431127757, "grad_norm": 0.5625, "learning_rate": 0.0018194964627548898, "loss": 1.3661, "step": 173500},
    {"epoch": 4.525593008739076, "grad_norm": 0.6796875, "learning_rate": 0.001818976279650437, "loss": 1.3649, "step": 174000},
    {"epoch": 4.538597586350395, "grad_norm": 0.37890625, "learning_rate": 0.0018184560965459843, "loss": 1.3667, "step": 174500},
    {"epoch": 4.551602163961714, "grad_norm": 0.439453125, "learning_rate": 0.0018179359134415315, "loss": 1.3658, "step": 175000},
    {"epoch": 4.564606741573034, "grad_norm": 0.392578125, "learning_rate": 0.0018174157303370785, "loss": 1.3655, "step": 175500},
    {"epoch": 4.577611319184353, "grad_norm": 0.416015625, "learning_rate": 0.0018168955472326258, "loss": 1.3638, "step": 176000},
    {"epoch": 4.590615896795672, "grad_norm": 1.484375, "learning_rate": 0.001816375364128173, "loss": 1.3621, "step": 176500},
    {"epoch": 4.603620474406991, "grad_norm": 0.349609375, "learning_rate": 0.0018158551810237204, "loss": 1.3649, "step": 177000},
    {"epoch": 4.6166250520183105, "grad_norm": 0.41796875, "learning_rate": 0.0018153349979192677, "loss": 1.3616, "step": 177500},
    {"epoch": 4.62962962962963, "grad_norm": 1.2890625, "learning_rate": 0.001814814814814815, "loss": 1.3617, "step": 178000},
    {"epoch": 4.642634207240949, "grad_norm": 0.59765625, "learning_rate": 0.0018142946317103621, "loss": 1.3639, "step": 178500},
    {"epoch": 4.655638784852268, "grad_norm": 1.0703125, "learning_rate": 0.0018137744486059093, "loss": 1.3641, "step": 179000},
    {"epoch": 4.6686433624635875, "grad_norm": 0.375, "learning_rate": 0.0018132542655014566, "loss": 1.3652, "step": 179500},
    {"epoch": 4.681647940074907, "grad_norm": 0.3984375, "learning_rate": 0.0018127340823970038, "loss": 1.3633, "step": 180000},
    {"epoch": 4.694652517686226, "grad_norm": 2.578125, "learning_rate": 0.001812213899292551, "loss": 1.3624, "step": 180500},
    {"epoch": 4.707657095297545, "grad_norm": 0.35546875, "learning_rate": 0.0018116937161880983, "loss": 1.3658, "step": 181000},
    {"epoch": 4.720661672908864, "grad_norm": 0.43359375, "learning_rate": 0.0018111735330836455, "loss": 1.3636, "step": 181500},
    {"epoch": 4.733666250520183, "grad_norm": 0.390625, "learning_rate": 0.0018106533499791927, "loss": 1.36, "step": 182000},
    {"epoch": 4.746670828131502, "grad_norm": 0.419921875, "learning_rate": 0.00181013316687474, "loss": 1.3631, "step": 182500},
    {"epoch": 4.759675405742821, "grad_norm": 1.5390625, "learning_rate": 0.0018096129837702872, "loss": 1.3647, "step": 183000},
    {"epoch": 4.7726799833541405, "grad_norm": 0.376953125, "learning_rate": 0.0018090928006658344, "loss": 1.3589, "step": 183500},
    {"epoch": 4.78568456096546, "grad_norm": 0.40234375, "learning_rate": 0.0018085726175613816, "loss": 1.3616, "step": 184000},
    {"epoch": 4.798689138576779, "grad_norm": 0.447265625, "learning_rate": 0.0018080524344569289, "loss": 1.3632, "step": 184500},
    {"epoch": 4.811693716188098, "grad_norm": 0.89453125, "learning_rate": 0.001807532251352476, "loss": 1.3643, "step": 185000},
    {"epoch": 4.824698293799417, "grad_norm": 0.5859375, "learning_rate": 0.0018070120682480233, "loss": 1.3632, "step": 185500},
    {"epoch": 4.837702871410737, "grad_norm": 0.48046875, "learning_rate": 0.0018064918851435705, "loss": 1.361, "step": 186000},
    {"epoch": 4.850707449022056, "grad_norm": 0.396484375, "learning_rate": 0.0018059717020391178, "loss": 1.3622, "step": 186500},
    {"epoch": 4.863712026633375, "grad_norm": 0.376953125, "learning_rate": 0.001805451518934665, "loss": 1.362, "step": 187000},
    {"epoch": 4.876716604244694, "grad_norm": 0.5078125, "learning_rate": 0.0018049313358302122, "loss": 1.3623, "step": 187500},
    {"epoch": 4.889721181856014, "grad_norm": 0.828125, "learning_rate": 0.0018044111527257595, "loss": 1.3599, "step": 188000},
    {"epoch": 4.902725759467333, "grad_norm": 0.546875, "learning_rate": 0.001803890969621307, "loss": 1.3636, "step": 188500},
    {"epoch": 4.915730337078652, "grad_norm": 0.4453125, "learning_rate": 0.0018033707865168541, "loss": 1.3609, "step": 189000},
    {"epoch": 4.92873491468997, "grad_norm": 0.404296875, "learning_rate": 0.0018028506034124014, "loss": 1.3603, "step": 189500},
    {"epoch": 4.94173949230129, "grad_norm": 0.6484375, "learning_rate": 0.0018023304203079484, "loss": 1.3601, "step": 190000},
{ |
|
"epoch": 4.954744069912609, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0018018102372034956, |
|
"loss": 1.3569, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 4.967748647523928, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.0018012900540990428, |
|
"loss": 1.358, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 4.980753225135247, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 0.00180076987099459, |
|
"loss": 1.3577, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 4.9937578027465666, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.0018002496878901373, |
|
"loss": 1.3618, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 1.3649697303771973, |
|
"eval_runtime": 0.5987, |
|
"eval_samples_per_second": 1670.164, |
|
"eval_steps_per_second": 6.681, |
|
"step": 192240 |
|
}, |
|
{ |
|
"epoch": 5.006762380357886, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0017997295047856845, |
|
"loss": 1.3587, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 5.019766957969205, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.0017992093216812317, |
|
"loss": 1.3542, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 5.032771535580524, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.001798689138576779, |
|
"loss": 1.3558, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 5.0457761131918435, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.0017981689554723264, |
|
"loss": 1.3563, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 5.058780690803163, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.0017976487723678736, |
|
"loss": 1.3549, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 5.071785268414482, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0017971285892634209, |
|
"loss": 1.3541, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 5.084789846025801, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.001796608406158968, |
|
"loss": 1.3555, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 5.09779442363712, |
|
"grad_norm": 0.68359375, |
|
"learning_rate": 0.0017960882230545153, |
|
"loss": 1.3536, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 5.11079900124844, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.0017955680399500626, |
|
"loss": 1.3523, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 5.123803578859759, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0017950478568456096, |
|
"loss": 1.3543, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 5.136808156471078, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0017945276737411568, |
|
"loss": 1.3544, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 5.149812734082397, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.001794007490636704, |
|
"loss": 1.3546, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 5.162817311693717, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0017934873075322513, |
|
"loss": 1.3536, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 5.175821889305035, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0017929671244277985, |
|
"loss": 1.3516, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 5.188826466916354, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.001792446941323346, |
|
"loss": 1.353, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 5.201831044527673, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0017919267582188932, |
|
"loss": 1.3541, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 5.214835622138993, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0017914065751144404, |
|
"loss": 1.3511, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 5.227840199750312, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.0017908863920099876, |
|
"loss": 1.3531, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 5.240844777361631, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.0017903662089055348, |
|
"loss": 1.3527, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 5.25384935497295, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.001789846025801082, |
|
"loss": 1.3544, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 5.26685393258427, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.0017893258426966293, |
|
"loss": 1.35, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 5.279858510195589, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0017888056595921765, |
|
"loss": 1.3526, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 5.292863087806908, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0017882854764877238, |
|
"loss": 1.3583, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 5.305867665418227, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0017877652933832708, |
|
"loss": 1.3553, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 5.3188722430295465, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.001787245110278818, |
|
"loss": 1.3526, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 5.331876820640866, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0017867249271743654, |
|
"loss": 1.3538, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 5.344881398252185, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0017862047440699127, |
|
"loss": 1.351, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 5.357885975863504, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00178568456096546, |
|
"loss": 1.3513, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 5.3708905534748235, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0017851643778610071, |
|
"loss": 1.3508, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 5.383895131086143, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0017846441947565544, |
|
"loss": 1.3511, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 5.396899708697461, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0017841240116521016, |
|
"loss": 1.3536, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 5.40990428630878, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0017836038285476488, |
|
"loss": 1.3534, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 5.4229088639200995, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.001783083645443196, |
|
"loss": 1.3532, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 5.435913441531419, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.0017825634623387433, |
|
"loss": 1.3528, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 5.448918019142738, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 0.0017820432792342905, |
|
"loss": 1.3511, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 5.461922596754057, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0017815230961298377, |
|
"loss": 1.3538, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 5.4749271743653765, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.001781002913025385, |
|
"loss": 1.354, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 5.487931751976696, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.0017804827299209324, |
|
"loss": 1.353, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 5.500936329588015, |
|
"grad_norm": 16.875, |
|
"learning_rate": 0.0017799625468164794, |
|
"loss": 1.3542, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 5.513940907199334, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0017794423637120266, |
|
"loss": 1.3551, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 5.526945484810653, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0017789221806075739, |
|
"loss": 1.3531, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 5.539950062421973, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.001778401997503121, |
|
"loss": 1.3545, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 5.552954640033292, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 0.0017778818143986683, |
|
"loss": 1.3521, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 5.565959217644611, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.0017773616312942156, |
|
"loss": 1.3552, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 5.57896379525593, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0017768414481897628, |
|
"loss": 1.3549, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 5.59196837286725, |
|
"grad_norm": 0.66015625, |
|
"learning_rate": 0.00177632126508531, |
|
"loss": 1.3519, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 5.604972950478569, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0017758010819808572, |
|
"loss": 1.351, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 5.617977528089888, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.0017752808988764045, |
|
"loss": 1.3514, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 5.630982105701207, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.001774760715771952, |
|
"loss": 1.3471, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 5.643986683312526, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0017742405326674991, |
|
"loss": 1.3486, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 5.656991260923845, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0017737203495630464, |
|
"loss": 1.3508, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 5.669995838535164, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.0017732001664585936, |
|
"loss": 1.3506, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 5.683000416146483, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0017726799833541406, |
|
"loss": 1.3505, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 5.696004993757803, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0017721598002496878, |
|
"loss": 1.3473, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 5.709009571369122, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.001771639617145235, |
|
"loss": 1.3509, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 5.722014148980441, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.0017711194340407823, |
|
"loss": 1.3531, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 5.73501872659176, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0017705992509363295, |
|
"loss": 1.3525, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 5.7480233042030795, |
|
"grad_norm": 0.765625, |
|
"learning_rate": 0.0017700790678318768, |
|
"loss": 1.3504, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 5.761027881814399, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.001769558884727424, |
|
"loss": 1.3499, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 5.774032459425718, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 0.0017690387016229714, |
|
"loss": 1.3504, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 5.787037037037037, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.0017685185185185187, |
|
"loss": 1.3496, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 5.800041614648356, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0017679983354140659, |
|
"loss": 1.3476, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 5.813046192259676, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0017674781523096131, |
|
"loss": 1.351, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 5.826050769870995, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0017669579692051603, |
|
"loss": 1.3493, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 5.839055347482314, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0017664377861007076, |
|
"loss": 1.3475, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 5.8520599250936325, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0017659176029962548, |
|
"loss": 1.3501, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 5.865064502704952, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0017653974198918018, |
|
"loss": 1.3494, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 5.878069080316271, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.001764877236787349, |
|
"loss": 1.3488, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 5.89107365792759, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.0017643570536828963, |
|
"loss": 1.3511, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 5.904078235538909, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0017638368705784435, |
|
"loss": 1.3494, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 5.917082813150229, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.001763316687473991, |
|
"loss": 1.3492, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 5.930087390761548, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.0017627965043695382, |
|
"loss": 1.3471, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 5.943091968372867, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0017622763212650854, |
|
"loss": 1.3481, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 5.956096545984186, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0017617561381606326, |
|
"loss": 1.3475, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 5.969101123595506, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.0017612359550561799, |
|
"loss": 1.3471, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 5.982105701206825, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.001760715771951727, |
|
"loss": 1.3469, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 5.995110278818144, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0017601955888472743, |
|
"loss": 1.3476, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 1.3531179428100586, |
|
"eval_runtime": 0.5767, |
|
"eval_samples_per_second": 1734.115, |
|
"eval_steps_per_second": 6.936, |
|
"step": 230688 |
|
}, |
|
{ |
|
"epoch": 6.008114856429463, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0017596754057428215, |
|
"loss": 1.3445, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 6.0211194340407825, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0017591552226383688, |
|
"loss": 1.3454, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 6.034124011652102, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.001758635039533916, |
|
"loss": 1.3453, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 6.047128589263421, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.001758114856429463, |
|
"loss": 1.3443, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 6.06013316687474, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0017575946733250102, |
|
"loss": 1.3408, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 6.0731377444860595, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0017570744902205577, |
|
"loss": 1.3445, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 6.086142322097379, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.001756554307116105, |
|
"loss": 1.3459, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 6.099146899708697, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0017560341240116521, |
|
"loss": 1.3447, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 6.112151477320016, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0017555139409071994, |
|
"loss": 1.3434, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 6.1251560549313355, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0017549937578027466, |
|
"loss": 1.3462, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 6.138160632542655, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.0017544735746982938, |
|
"loss": 1.345, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 6.151165210153974, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.001753953391593841, |
|
"loss": 1.3433, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 6.164169787765293, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0017534332084893883, |
|
"loss": 1.3421, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 6.1771743653766125, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0017529130253849355, |
|
"loss": 1.3441, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 6.190178942987932, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0017523928422804827, |
|
"loss": 1.3456, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 6.203183520599251, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.00175187265917603, |
|
"loss": 1.3457, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 6.21618809821057, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0017513524760715774, |
|
"loss": 1.3447, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 6.229192675821889, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0017508322929671246, |
|
"loss": 1.3445, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 6.242197253433209, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0017503121098626717, |
|
"loss": 1.3462, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 6.255201831044528, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0017497919267582189, |
|
"loss": 1.3478, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 6.268206408655847, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0017492717436537661, |
|
"loss": 1.3448, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 6.281210986267166, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0017487515605493133, |
|
"loss": 1.3425, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 6.294215563878486, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 0.0017482313774448606, |
|
"loss": 1.345, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 6.307220141489805, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.0017477111943404078, |
|
"loss": 1.341, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 6.320224719101123, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.001747191011235955, |
|
"loss": 1.3441, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 6.333229296712442, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0017466708281315023, |
|
"loss": 1.3427, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 6.346233874323762, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.0017461506450270495, |
|
"loss": 1.3466, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 6.359238451935081, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.001745630461922597, |
|
"loss": 1.3414, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 6.3722430295464, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0017451102788181442, |
|
"loss": 1.3433, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 6.385247607157719, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0017445900957136914, |
|
"loss": 1.3398, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 6.398252184769039, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0017440699126092386, |
|
"loss": 1.3425, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 6.411256762380358, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0017435497295047858, |
|
"loss": 1.3447, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 6.424261339991677, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0017430295464003329, |
|
"loss": 1.3427, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 6.437265917602996, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00174250936329588, |
|
"loss": 1.343, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 6.4502704952143155, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0017419891801914273, |
|
"loss": 1.3429, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 6.463275072825635, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0017414689970869745, |
|
"loss": 1.3439, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 6.476279650436954, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0017409488139825218, |
|
"loss": 1.3433, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 6.489284228048273, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.001740428630878069, |
|
"loss": 1.344, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 6.502288805659592, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0017399084477736164, |
|
"loss": 1.3424, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 6.515293383270912, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0017393882646691637, |
|
"loss": 1.3416, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 6.528297960882231, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.001738868081564711, |
|
"loss": 1.3417, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 6.54130253849355, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.0017383478984602581, |
|
"loss": 1.3434, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 6.554307116104869, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.0017378277153558054, |
|
"loss": 1.3395, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 6.567311693716188, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0017373075322513526, |
|
"loss": 1.3422, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 6.580316271327507, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 0.0017367873491468998, |
|
"loss": 1.3405, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 6.593320848938826, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.001736267166042447, |
|
"loss": 1.3418, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 6.606325426550145, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.001735746982937994, |
|
"loss": 1.3412, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 6.619330004161465, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.0017352267998335413, |
|
"loss": 1.3426, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 6.632334581772784, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0017347066167290885, |
|
"loss": 1.3434, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 6.645339159384103, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.001734186433624636, |
|
"loss": 1.3441, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 6.658343736995422, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 0.0017336662505201832, |
|
"loss": 1.3421, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 6.671348314606742, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.0017331460674157304, |
|
"loss": 1.3405, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 6.684352892218061, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0017326258843112776, |
|
"loss": 1.3409, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 6.69735746982938, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0017321057012068249, |
|
"loss": 1.3429, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 6.710362047440699, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.001731585518102372, |
|
"loss": 1.3409, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 6.7233666250520185, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0017310653349979193, |
|
"loss": 1.3404, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 6.736371202663338, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.0017305451518934666, |
|
"loss": 1.3406, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 6.749375780274657, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0017300249687890138, |
|
"loss": 1.341, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 6.762380357885976, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.001729504785684561, |
|
"loss": 1.3382, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 6.775384935497295, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 0.0017289846025801082, |
|
"loss": 1.3411, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 6.788389513108614, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0017284644194756553, |
|
"loss": 1.3429, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 6.801394090719933, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0017279442363712027, |
|
"loss": 1.3427, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 6.814398668331252, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00172742405326675, |
|
"loss": 1.3411, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 6.8274032459425715, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0017269038701622972, |
|
"loss": 1.3397, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 6.840407823553891, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.0017263836870578444, |
|
"loss": 1.3386, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 6.85341240116521, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0017258635039533916, |
|
"loss": 1.3392, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 6.866416978776529, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.0017253433208489388, |
|
"loss": 1.3424, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 6.8794215563878485, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.001724823137744486, |
|
"loss": 1.3418, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 6.892426133999168, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0017243029546400333, |
|
"loss": 1.3378, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 6.905430711610487, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0017237827715355805, |
|
"loss": 1.3398, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 6.918435289221806, |
|
"grad_norm": 3.5625, |
|
"learning_rate": 0.0017232625884311278, |
|
"loss": 1.3411, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 6.931439866833125, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.001722742405326675, |
|
"loss": 1.3377, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 6.944444444444445, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0017222222222222224, |
|
"loss": 1.339, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 6.957449022055764, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.0017217020391177697, |
|
"loss": 1.3398, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 6.970453599667083, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.0017211818560133169, |
|
"loss": 1.3397, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 6.983458177278402, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.001720661672908864, |
|
"loss": 1.3382, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 6.996462754889722, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0017201414898044111, |
|
"loss": 1.3396, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 1.3440358638763428, |
|
"eval_runtime": 0.7691, |
|
"eval_samples_per_second": 1300.178, |
|
"eval_steps_per_second": 5.201, |
|
"step": 269136 |
|
}, |
|
{ |
|
"epoch": 7.00946733250104, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0017196213066999584, |
|
"loss": 1.3346, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 7.022471910112359, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.0017191011235955056, |
|
"loss": 1.3376, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 7.035476487723678, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0017185809404910528, |
|
"loss": 1.3362, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 7.048481065334998, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.0017180607573866, |
|
"loss": 1.3358, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 7.061485642946317, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0017175405742821473, |
|
"loss": 1.3352, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 7.074490220557636, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0017170203911776945, |
|
"loss": 1.3363, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 7.087494798168955, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 0.001716500208073242, |
|
"loss": 1.3376, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 7.100499375780275, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0017159800249687892, |
|
"loss": 1.3359, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 7.113503953391594, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 0.0017154598418643364, |
|
"loss": 1.3391, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 7.126508531002913, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0017149396587598836, |
|
"loss": 1.336, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 7.139513108614232, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0017144194756554309, |
|
"loss": 1.3396, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 7.1525176862255515, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.001713899292550978, |
|
"loss": 1.3355, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 7.165522263836871, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.001713379109446525, |
|
"loss": 1.3334, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 7.17852684144819, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0017128589263420723, |
|
"loss": 1.3366, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 7.191531419059509, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0017123387432376196, |
|
"loss": 1.3369, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 7.2045359966708284, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0017118185601331668, |
|
"loss": 1.3365, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 7.217540574282148, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.001711298377028714, |
|
"loss": 1.3345, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 7.230545151893467, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0017107781939242615, |
|
"loss": 1.3364, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 7.243549729504785, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0017102580108198087, |
|
"loss": 1.3346, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 7.2565543071161045, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.001709737827715356, |
|
"loss": 1.3363, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 7.269558884727424, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0017092176446109031, |
|
"loss": 1.3375, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 7.282563462338743, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0017086974615064504, |
|
"loss": 1.3349, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 7.295568039950062, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.0017081772784019976, |
|
"loss": 1.3359, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 7.308572617561381, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0017076570952975448, |
|
"loss": 1.3331, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 7.321577195172701, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.001707136912193092, |
|
"loss": 1.3355, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 7.33458177278402, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0017066167290886393, |
|
"loss": 1.3356, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 7.347586350395339, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.0017060965459841863, |
|
"loss": 1.3329, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 7.360590928006658, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.0017055763628797335, |
|
"loss": 1.3345, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 7.373595505617978, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0017050561797752807, |
|
"loss": 1.3375, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 7.386600083229297, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.0017045359966708282, |
|
"loss": 1.337, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 7.399604660840616, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0017040158135663754, |
|
"loss": 1.3329, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 7.412609238451935, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0017034956304619227, |
|
"loss": 1.3368, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 7.4256138160632545, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0017029754473574699, |
|
"loss": 1.3342, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 7.438618393674574, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.001702455264253017, |
|
"loss": 1.3355, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 7.451622971285893, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0017019350811485643, |
|
"loss": 1.3323, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 7.464627548897212, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0017014148980441116, |
|
"loss": 1.3366, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 7.477632126508531, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 0.0017008947149396588, |
|
"loss": 1.3364, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 7.49063670411985, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.001700374531835206, |
|
"loss": 1.334, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 7.503641281731169, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.0016998543487307533, |
|
"loss": 1.3348, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 7.516645859342488, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 0.0016993341656263005, |
|
"loss": 1.3378, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 7.5296504369538075, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.001698813982521848, |
|
"loss": 1.3348, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 7.542655014565127, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.001698293799417395, |
|
"loss": 1.3327, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 7.555659592176446, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0016977736163129422, |
|
"loss": 1.3348, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 7.568664169787765, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.0016972534332084894, |
|
"loss": 1.3347, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 7.5816687473990845, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0016967332501040366, |
|
"loss": 1.3345, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 7.594673325010404, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0016962130669995838, |
|
"loss": 1.335, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 7.607677902621723, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.001695692883895131, |
|
"loss": 1.3339, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 7.620682480233042, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0016951727007906783, |
|
"loss": 1.3355, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 7.633687057844361, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0016946525176862255, |
|
"loss": 1.334, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 7.646691635455681, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.0016941323345817728, |
|
"loss": 1.332, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 7.659696213067, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.00169361215147732, |
|
"loss": 1.3332, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 7.672700790678319, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0016930919683728674, |
|
"loss": 1.3336, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 7.6857053682896375, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0016925717852684147, |
|
"loss": 1.3318, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 7.698709945900957, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.001692051602163962, |
|
"loss": 1.3365, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 7.711714523512276, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0016915314190595091, |
|
"loss": 1.3334, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 7.724719101123595, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0016910112359550561, |
|
"loss": 1.3345, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 7.737723678734914, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.0016904910528506034, |
|
"loss": 1.3349, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 7.750728256346234, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0016899708697461506, |
|
"loss": 1.334, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 7.763732833957553, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0016894506866416978, |
|
"loss": 1.3321, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 7.776737411568872, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.001688930503537245, |
|
"loss": 1.333, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 7.789741989180191, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0016884103204327923, |
|
"loss": 1.3331, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 7.802746566791511, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.0016878901373283395, |
|
"loss": 1.3346, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 7.81575114440283, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.001687369954223887, |
|
"loss": 1.3325, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 7.828755722014149, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.0016868497711194342, |
|
"loss": 1.3314, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 7.841760299625468, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0016863295880149814, |
|
"loss": 1.3336, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 7.8547648772367875, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0016858094049105286, |
|
"loss": 1.3378, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 7.867769454848107, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0016852892218060759, |
|
"loss": 1.33, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 7.880774032459426, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.001684769038701623, |
|
"loss": 1.3354, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 7.893778610070745, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.0016842488555971703, |
|
"loss": 1.3348, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 7.9067831876820645, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0016837286724927173, |
|
"loss": 1.3333, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 7.919787765293384, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0016832084893882646, |
|
"loss": 1.3345, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 7.932792342904703, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0016826883062838118, |
|
"loss": 1.333, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 7.945796920516021, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.001682168123179359, |
|
"loss": 1.3348, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 7.9588014981273405, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0016816479400749065, |
|
"loss": 1.3343, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 7.97180607573866, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0016811277569704537, |
|
"loss": 1.3348, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 7.984810653349979, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 0.001680607573866001, |
|
"loss": 1.3361, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 7.997815230961298, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0016800873907615481, |
|
"loss": 1.335, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 1.3374426364898682, |
|
"eval_runtime": 0.814, |
|
"eval_samples_per_second": 1228.431, |
|
"eval_steps_per_second": 4.914, |
|
"step": 307584 |
|
}, |
|
{ |
|
"epoch": 8.010819808572618, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.0016795672076570954, |
|
"loss": 1.3305, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 8.023824386183938, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.0016790470245526426, |
|
"loss": 1.3302, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 8.036828963795257, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0016785268414481898, |
|
"loss": 1.3295, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 8.049833541406574, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.001678006658343737, |
|
"loss": 1.331, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 8.062838119017893, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0016774864752392843, |
|
"loss": 1.3299, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 8.075842696629213, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.0016769662921348315, |
|
"loss": 1.3321, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 8.088847274240532, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0016764461090303787, |
|
"loss": 1.3289, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 8.101851851851851, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 0.0016759259259259258, |
|
"loss": 1.3318, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 8.11485642946317, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0016754057428214732, |
|
"loss": 1.3326, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 8.12786100707449, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.0016748855597170204, |
|
"loss": 1.3277, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 8.140865584685809, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0016743653766125677, |
|
"loss": 1.3313, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 8.153870162297128, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 0.0016738451935081149, |
|
"loss": 1.331, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 8.166874739908447, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0016733250104036621, |
|
"loss": 1.3296, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 8.179879317519767, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0016728048272992093, |
|
"loss": 1.3291, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 8.192883895131086, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0016722846441947566, |
|
"loss": 1.3307, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 8.205888472742405, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0016717644610903038, |
|
"loss": 1.3317, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 8.218893050353724, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.001671244277985851, |
|
"loss": 1.3343, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 8.231897627965044, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0016707240948813983, |
|
"loss": 1.3311, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 8.244902205576363, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0016702039117769455, |
|
"loss": 1.3301, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 8.257906783187682, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.001669683728672493, |
|
"loss": 1.3313, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 8.270911360799001, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.0016691635455680402, |
|
"loss": 1.3322, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 8.28391593841032, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0016686433624635872, |
|
"loss": 1.3309, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 8.29692051602164, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0016681231793591344, |
|
"loss": 1.3299, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 8.309925093632959, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0016676029962546816, |
|
"loss": 1.3295, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 8.322929671244278, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0016670828131502289, |
|
"loss": 1.3312, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 8.335934248855597, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.001666562630045776, |
|
"loss": 1.329, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 8.348938826466917, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0016660424469413233, |
|
"loss": 1.3312, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 8.361943404078236, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0016655222638368705, |
|
"loss": 1.3298, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 8.374947981689555, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0016650020807324178, |
|
"loss": 1.3282, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 8.387952559300874, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.001664481897627965, |
|
"loss": 1.3293, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 8.400957136912194, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.0016639617145235124, |
|
"loss": 1.333, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 8.413961714523513, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.0016634415314190597, |
|
"loss": 1.3282, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 8.426966292134832, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.001662921348314607, |
|
"loss": 1.3279, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 8.439970869746151, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0016624011652101541, |
|
"loss": 1.3314, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 8.45297544735747, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0016618809821057014, |
|
"loss": 1.33, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 8.46598002496879, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.0016613607990012484, |
|
"loss": 1.3287, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 8.478984602580109, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.0016608406158967956, |
|
"loss": 1.331, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 8.491989180191428, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.0016603204327923428, |
|
"loss": 1.3306, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 8.504993757802747, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00165980024968789, |
|
"loss": 1.3311, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 8.517998335414065, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0016592800665834373, |
|
"loss": 1.3315, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 8.531002913025384, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0016587598834789845, |
|
"loss": 1.3307, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 8.544007490636703, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.001658239700374532, |
|
"loss": 1.3323, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 8.557012068248023, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.0016577195172700792, |
|
"loss": 1.3308, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 8.570016645859342, |
|
"grad_norm": 0.7421875, |
|
"learning_rate": 0.0016571993341656264, |
|
"loss": 1.3297, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 8.583021223470661, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.0016566791510611736, |
|
"loss": 1.3301, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 8.59602580108198, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0016561589679567209, |
|
"loss": 1.3297, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 8.6090303786933, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.001655638784852268, |
|
"loss": 1.3292, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 8.622034956304619, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.0016551186017478153, |
|
"loss": 1.3295, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 8.635039533915938, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.0016545984186433626, |
|
"loss": 1.3291, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 8.648044111527257, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0016540782355389098, |
|
"loss": 1.3296, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 8.661048689138577, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 0.0016535580524344568, |
|
"loss": 1.3273, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 8.674053266749896, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.001653037869330004, |
|
"loss": 1.3299, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 8.687057844361215, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.0016525176862255513, |
|
"loss": 1.3287, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 8.700062421972534, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.0016519975031210987, |
|
"loss": 1.3295, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 8.713066999583853, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.001651477320016646, |
|
"loss": 1.3264, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 8.726071577195173, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 0.0016509571369121932, |
|
"loss": 1.3281, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 8.739076154806492, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.0016504369538077404, |
|
"loss": 1.3295, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 8.752080732417811, |
|
"grad_norm": 0.7890625, |
|
"learning_rate": 0.0016499167707032876, |
|
"loss": 1.3286, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 8.76508531002913, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.0016493965875988348, |
|
"loss": 1.327, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 8.77808988764045, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.001648876404494382, |
|
"loss": 1.3269, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 8.791094465251769, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0016483562213899293, |
|
"loss": 1.3287, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 8.804099042863088, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0016478360382854765, |
|
"loss": 1.3279, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 8.817103620474407, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0016473158551810238, |
|
"loss": 1.3304, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 8.830108198085727, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.001646795672076571, |
|
"loss": 1.3286, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 8.843112775697046, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 0.0016462754889721182, |
|
"loss": 1.3299, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 8.856117353308365, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0016457553058676654, |
|
"loss": 1.3283, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 8.869121930919684, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0016452351227632127, |
|
"loss": 1.3289, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 8.882126508531003, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00164471493965876, |
|
"loss": 1.3299, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 8.895131086142323, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0016441947565543071, |
|
"loss": 1.3291, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 8.908135663753642, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 0.0016436745734498544, |
|
"loss": 1.3321, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 8.921140241364961, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 0.0016431543903454016, |
|
"loss": 1.3266, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 8.93414481897628, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0016426342072409488, |
|
"loss": 1.3289, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 8.947149396587598, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.001642114024136496, |
|
"loss": 1.3273, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 8.960153974198917, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 0.0016415938410320433, |
|
"loss": 1.3283, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 8.973158551810236, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 0.0016410736579275905, |
|
"loss": 1.3267, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 8.986163129421556, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.001640553474823138, |
|
"loss": 1.3272, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 8.999167707032875, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0016400332917186852, |
|
"loss": 1.3262, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 1.3324060440063477, |
|
"eval_runtime": 0.7089, |
|
"eval_samples_per_second": 1410.561, |
|
"eval_steps_per_second": 5.642, |
|
"step": 346032 |
|
}, |
|
{ |
|
"epoch": 9.012172284644194, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.0016395131086142324, |
|
"loss": 1.3265, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 9.025176862255513, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.0016389929255097794, |
|
"loss": 1.3243, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 9.038181439866833, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.0016384727424053266, |
|
"loss": 1.3273, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 9.051186017478152, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 0.0016379525593008739, |
|
"loss": 1.3269, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 9.064190595089471, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.001637432376196421, |
|
"loss": 1.3264, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 9.07719517270079, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.0016369121930919683, |
|
"loss": 1.3274, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 9.09019975031211, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0016363920099875156, |
|
"loss": 1.3251, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 9.103204327923429, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.0016358718268830628, |
|
"loss": 1.3239, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 9.116208905534748, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.00163535164377861, |
|
"loss": 1.3263, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 9.129213483146067, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0016348314606741575, |
|
"loss": 1.3257, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 9.142218060757386, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.0016343112775697047, |
|
"loss": 1.327, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 9.155222638368706, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.001633791094465252, |
|
"loss": 1.3262, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 9.168227215980025, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0016332709113607991, |
|
"loss": 1.327, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 9.181231793591344, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.0016327507282563464, |
|
"loss": 1.3247, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 9.194236371202663, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0016322305451518936, |
|
"loss": 1.3278, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 9.207240948813983, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0016317103620474408, |
|
"loss": 1.3253, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 9.220245526425302, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0016311901789429878, |
|
"loss": 1.3275, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 9.233250104036621, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.001630669995838535, |
|
"loss": 1.328, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 9.24625468164794, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.0016301498127340823, |
|
"loss": 1.3296, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 9.25925925925926, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0016296296296296295, |
|
"loss": 1.3279, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 9.272263836870579, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.0016291094465251768, |
|
"loss": 1.3281, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 9.285268414481898, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0016285892634207242, |
|
"loss": 1.3286, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 9.298272992093217, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0016280690803162714, |
|
"loss": 1.325, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 9.311277569704536, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.0016275488972118187, |
|
"loss": 1.3296, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 9.324282147315856, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0016270287141073659, |
|
"loss": 1.3267, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 9.337286724927175, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0016265085310029131, |
|
"loss": 1.3264, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 9.350291302538494, |
|
"grad_norm": 0.625, |
|
"learning_rate": 0.0016259883478984603, |
|
"loss": 1.3284, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 9.363295880149813, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.0016254681647940076, |
|
"loss": 1.3278, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 9.376300457761133, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.0016249479816895548, |
|
"loss": 1.3286, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 9.389305035372452, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.001624427798585102, |
|
"loss": 1.3271, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 9.402309612983771, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.001623907615480649, |
|
"loss": 1.3302, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 9.41531419059509, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0016233874323761963, |
|
"loss": 1.328, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 9.428318768206408, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 0.0016228672492717437, |
|
"loss": 1.329, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 9.441323345817727, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 0.001622347066167291, |
|
"loss": 1.3263, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 9.454327923429046, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0016218268830628382, |
|
"loss": 1.3274, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 9.467332501040365, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 0.0016213066999583854, |
|
"loss": 1.3277, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 9.480337078651685, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.0016207865168539326, |
|
"loss": 1.3267, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 9.493341656263004, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.0016202663337494799, |
|
"loss": 1.328, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 9.506346233874323, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.001619746150645027, |
|
"loss": 1.325, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 9.519350811485642, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0016192259675405743, |
|
"loss": 1.3243, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 9.532355389096962, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0016187057844361215, |
|
"loss": 1.3275, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 9.545359966708281, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0016181856013316688, |
|
"loss": 1.327, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 9.5583645443196, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 0.001617665418227216, |
|
"loss": 1.3273, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 9.57136912193092, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0016171452351227634, |
|
"loss": 1.3253, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 9.584373699542239, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 0.0016166250520183105, |
|
"loss": 1.3245, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 9.597378277153558, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.0016161048689138577, |
|
"loss": 1.3262, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 9.610382854764877, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 0.001615584685809405, |
|
"loss": 1.3269, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 9.623387432376196, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0016150645027049521, |
|
"loss": 1.3265, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 9.636392009987516, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.0016145443196004994, |
|
"loss": 1.3274, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 9.649396587598835, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.0016140241364960466, |
|
"loss": 1.3255, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 9.662401165210154, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.0016135039533915938, |
|
"loss": 1.3281, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 9.675405742821473, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.001612983770287141, |
|
"loss": 1.3255, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 9.688410320432792, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 0.0016124635871826883, |
|
"loss": 1.3243, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 9.701414898044112, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0016119434040782355, |
|
"loss": 1.3219, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 9.714419475655431, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.001611423220973783, |
|
"loss": 1.3245, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 9.72742405326675, |
|
"grad_norm": 0.78515625, |
|
"learning_rate": 0.0016109030378693302, |
|
"loss": 1.3235, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 9.74042863087807, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0016103828547648774, |
|
"loss": 1.3249, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 9.753433208489389, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0016098626716604246, |
|
"loss": 1.3235, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 9.766437786100708, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 0.0016093424885559717, |
|
"loss": 1.3246, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 9.779442363712027, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0016088223054515189, |
|
"loss": 1.3261, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 9.792446941323346, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0016083021223470661, |
|
"loss": 1.3246, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 9.805451518934666, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 0.0016077819392426133, |
|
"loss": 1.3242, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 9.818456096545985, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0016072617561381606, |
|
"loss": 1.3268, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 9.831460674157304, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0016067415730337078, |
|
"loss": 1.327, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 9.844465251768623, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.001606221389929255, |
|
"loss": 1.3246, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 9.857469829379943, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0016057012068248025, |
|
"loss": 1.323, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 9.87047440699126, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0016051810237203497, |
|
"loss": 1.3247, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 9.88347898460258, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.001604660840615897, |
|
"loss": 1.3228, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 9.896483562213898, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0016041406575114442, |
|
"loss": 1.3268, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 9.909488139825218, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0016036204744069914, |
|
"loss": 1.3253, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 9.922492717436537, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0016031002913025386, |
|
"loss": 1.3239, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 9.935497295047856, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.0016025801081980858, |
|
"loss": 1.3257, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 9.948501872659175, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.001602059925093633, |
|
"loss": 1.3239, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 9.961506450270495, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00160153974198918, |
|
"loss": 1.3247, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 9.974511027881814, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 0.0016010195588847273, |
|
"loss": 1.3239, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 9.987515605493133, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0016004993757802745, |
|
"loss": 1.3226, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 1.3314919471740723, |
|
"eval_runtime": 0.597, |
|
"eval_samples_per_second": 1675.105, |
|
"eval_steps_per_second": 6.7, |
|
"step": 384480 |
|
}, |
|
{ |
|
"epoch": 10.000520183104452, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 0.0015999791926758218, |
|
"loss": 1.3244, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 10.013524760715772, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0015994590095713692, |
|
"loss": 1.3234, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 10.02652933832709, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.0015989388264669164, |
|
"loss": 1.3219, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 10.03953391593841, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0015984186433624637, |
|
"loss": 1.3208, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 10.05253849354973, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.001597898460258011, |
|
"loss": 1.3214, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 10.065543071161049, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0015973782771535581, |
|
"loss": 1.3264, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 10.078547648772368, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.0015968580940491054, |
|
"loss": 1.3225, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 10.091552226383687, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.0015963379109446526, |
|
"loss": 1.324, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 10.104556803995006, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 0.0015958177278401998, |
|
"loss": 1.322, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 10.117561381606325, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.001595297544735747, |
|
"loss": 1.322, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 10.130565959217645, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0015947773616312943, |
|
"loss": 1.3236, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 10.143570536828964, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.0015942571785268413, |
|
"loss": 1.3237, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 10.156575114440283, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0015937369954223887, |
|
"loss": 1.3214, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 10.169579692051602, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.001593216812317936, |
|
"loss": 1.3217, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 10.182584269662922, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.0015926966292134832, |
|
"loss": 1.3234, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 10.19558884727424, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 0.0015921764461090304, |
|
"loss": 1.3241, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 10.20859342488556, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0015916562630045776, |
|
"loss": 1.3219, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 10.22159800249688, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0015911360799001249, |
|
"loss": 1.3233, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 10.234602580108199, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.001590615896795672, |
|
"loss": 1.3239, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 10.247607157719518, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.0015900957136912193, |
|
"loss": 1.3235, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 10.260611735330837, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 0.0015895755305867666, |
|
"loss": 1.3228, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 10.273616312942156, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0015890553474823138, |
|
"loss": 1.3249, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 10.286620890553476, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.001588535164377861, |
|
"loss": 1.323, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 10.299625468164795, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.0015880149812734085, |
|
"loss": 1.3216, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 10.312630045776114, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.0015874947981689557, |
|
"loss": 1.3242, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 10.325634623387433, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0015869746150645027, |
|
"loss": 1.3236, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 10.338639200998752, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00158645443196005, |
|
"loss": 1.3207, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 10.35164377861007, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0015859342488555972, |
|
"loss": 1.3215, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 10.36464835622139, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.0015854140657511444, |
|
"loss": 1.3216, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 10.377652933832708, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0015848938826466916, |
|
"loss": 1.3223, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 10.390657511444028, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.0015843736995422388, |
|
"loss": 1.3195, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 10.403662089055347, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.001583853516437786, |
|
"loss": 1.3197, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 10.416666666666666, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.0015833333333333333, |
|
"loss": 1.3225, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 10.429671244277985, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0015828131502288805, |
|
"loss": 1.3223, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 10.442675821889305, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.001582292967124428, |
|
"loss": 1.3231, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 10.455680399500624, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0015817727840199752, |
|
"loss": 1.324, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 10.468684977111943, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0015812526009155224, |
|
"loss": 1.3217, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 10.481689554723262, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 0.0015807324178110697, |
|
"loss": 1.3238, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 10.494694132334581, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0015802122347066169, |
|
"loss": 1.3235, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 10.5076987099459, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.0015796920516021641, |
|
"loss": 1.3233, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 10.52070328755722, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0015791718684977111, |
|
"loss": 1.3209, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 10.53370786516854, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.0015786516853932584, |
|
"loss": 1.3207, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 10.546712442779858, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.0015781315022888056, |
|
"loss": 1.3225, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 10.559717020391178, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 0.0015776113191843528, |
|
"loss": 1.3186, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 10.572721598002497, |
|
"grad_norm": 5.0625, |
|
"learning_rate": 0.0015770911360799, |
|
"loss": 1.3233, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 10.585726175613816, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0015765709529754473, |
|
"loss": 1.324, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 10.598730753225135, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.0015760507698709947, |
|
"loss": 1.3247, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 10.611735330836455, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 0.001575530586766542, |
|
"loss": 1.3247, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 10.624739908447774, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0015750104036620892, |
|
"loss": 1.3217, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 10.637744486059093, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.0015744902205576364, |
|
"loss": 1.3215, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 10.650749063670412, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 0.0015739700374531836, |
|
"loss": 1.3236, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 10.663753641281732, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 0.0015734498543487309, |
|
"loss": 1.3252, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 10.67675821889305, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.001572929671244278, |
|
"loss": 1.3238, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 10.68976279650437, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 0.0015724094881398253, |
|
"loss": 1.3225, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 10.70276737411569, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 0.0015718893050353723, |
|
"loss": 1.3195, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 10.715771951727008, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0015713691219309195, |
|
"loss": 1.3218, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 10.728776529338328, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.0015708489388264668, |
|
"loss": 1.3224, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 10.741781106949647, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.0015703287557220142, |
|
"loss": 1.3203, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 10.754785684560966, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.0015698085726175615, |
|
"loss": 1.3207, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 10.767790262172285, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0015692883895131087, |
|
"loss": 1.3208, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 10.780794839783605, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.001568768206408656, |
|
"loss": 1.3225, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 10.793799417394922, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.0015682480233042031, |
|
"loss": 1.326, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 10.806803995006241, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.0015677278401997504, |
|
"loss": 1.3212, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 10.81980857261756, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.0015672076570952976, |
|
"loss": 1.3227, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 10.83281315022888, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.0015666874739908448, |
|
"loss": 1.3217, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 10.845817727840199, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 0.001566167290886392, |
|
"loss": 1.3217, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 10.858822305451518, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0015656471077819393, |
|
"loss": 1.3227, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 10.871826883062838, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0015651269246774865, |
|
"loss": 1.3235, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 10.884831460674157, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 0.0015646067415730337, |
|
"loss": 1.3217, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 10.897836038285476, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 0.001564086558468581, |
|
"loss": 1.3208, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 10.910840615896795, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.0015635663753641282, |
|
"loss": 1.3222, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 10.923845193508114, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.0015630461922596754, |
|
"loss": 1.3214, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 10.936849771119434, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 0.0015625260091552226, |
|
"loss": 1.3254, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 10.949854348730753, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.0015620058260507699, |
|
"loss": 1.3233, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 10.962858926342072, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.001561485642946317, |
|
"loss": 1.3219, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 10.975863503953391, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 0.0015609654598418643, |
|
"loss": 1.3225, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 10.98886808156471, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 0.0015604452767374116, |
|
"loss": 1.3211, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_loss": 1.3315958976745605, |
|
"eval_runtime": 0.7007, |
|
"eval_samples_per_second": 1427.058, |
|
"eval_steps_per_second": 5.708, |
|
"step": 422928 |
|
} |
|
],
"logging_steps": 500,
"max_steps": 1922400,
"num_input_tokens_seen": 0,
"num_train_epochs": 50,
"save_steps": 500,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 3,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 8.879327056150544e+19,
"train_batch_size": 256,
"trial_name": null,
"trial_params": null
}