{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.039255671679837,
  "eval_steps": 500,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02549069589599796,
      "grad_norm": 3183.12646484375,
      "learning_rate": 2.5e-06,
      "loss": 227.1212,
      "step": 100
    },
    {
      "epoch": 0.05098139179199592,
      "grad_norm": 363.3997802734375,
      "learning_rate": 5e-06,
      "loss": 88.4222,
      "step": 200
    },
    {
      "epoch": 0.07647208768799388,
      "grad_norm": 263.5315246582031,
      "learning_rate": 7.500000000000001e-06,
      "loss": 67.8251,
      "step": 300
    },
    {
      "epoch": 0.10196278358399184,
      "grad_norm": 328.4805908203125,
      "learning_rate": 1e-05,
      "loss": 63.1532,
      "step": 400
    },
    {
      "epoch": 0.1274534794799898,
      "grad_norm": 496.2308044433594,
      "learning_rate": 9.995728791936505e-06,
      "loss": 60.2136,
      "step": 500
    },
    {
      "epoch": 0.15294417537598776,
      "grad_norm": 405.20025634765625,
      "learning_rate": 9.98292246503335e-06,
      "loss": 59.0975,
      "step": 600
    },
    {
      "epoch": 0.17843487127198573,
      "grad_norm": 451.58197021484375,
      "learning_rate": 9.961602898685225e-06,
      "loss": 55.2574,
      "step": 700
    },
    {
      "epoch": 0.20392556716798368,
      "grad_norm": 127.22090148925781,
      "learning_rate": 9.931806517013612e-06,
      "loss": 56.7531,
      "step": 800
    },
    {
      "epoch": 0.22941626306398163,
      "grad_norm": 431.8385925292969,
      "learning_rate": 9.893584226636773e-06,
      "loss": 57.5419,
      "step": 900
    },
    {
      "epoch": 0.2549069589599796,
      "grad_norm": 249.0337677001953,
      "learning_rate": 9.847001329696653e-06,
      "loss": 57.2545,
      "step": 1000
    },
    {
      "epoch": 0.28039765485597756,
      "grad_norm": 726.9508056640625,
      "learning_rate": 9.792137412291265e-06,
      "loss": 56.1048,
      "step": 1100
    },
    {
      "epoch": 0.3058883507519755,
      "grad_norm": 811.3701171875,
      "learning_rate": 9.729086208503174e-06,
      "loss": 53.1675,
      "step": 1200
    },
    {
      "epoch": 0.33137904664797346,
      "grad_norm": 754.3577270507812,
      "learning_rate": 9.657955440256396e-06,
      "loss": 54.9404,
      "step": 1300
    },
    {
      "epoch": 0.35686974254397147,
      "grad_norm": 267.7255554199219,
      "learning_rate": 9.578866633275289e-06,
      "loss": 55.7077,
      "step": 1400
    },
    {
      "epoch": 0.3823604384399694,
      "grad_norm": 244.13253784179688,
      "learning_rate": 9.491954909459895e-06,
      "loss": 56.8997,
      "step": 1500
    },
    {
      "epoch": 0.40785113433596737,
      "grad_norm": 302.5712890625,
      "learning_rate": 9.397368756032445e-06,
      "loss": 50.8917,
      "step": 1600
    },
    {
      "epoch": 0.4333418302319653,
      "grad_norm": 660.6898193359375,
      "learning_rate": 9.295269771849426e-06,
      "loss": 49.8636,
      "step": 1700
    },
    {
      "epoch": 0.45883252612796327,
      "grad_norm": 235.12942504882812,
      "learning_rate": 9.185832391312644e-06,
      "loss": 53.0454,
      "step": 1800
    },
    {
      "epoch": 0.4843232220239613,
      "grad_norm": 231.6936492919922,
      "learning_rate": 9.069243586350976e-06,
      "loss": 56.1398,
      "step": 1900
    },
    {
      "epoch": 0.5098139179199592,
      "grad_norm": 460.98486328125,
      "learning_rate": 8.94570254698197e-06,
      "loss": 52.8867,
      "step": 2000
    },
    {
      "epoch": 0.5353046138159572,
      "grad_norm": 567.8645629882812,
      "learning_rate": 8.815420340999034e-06,
      "loss": 56.6194,
      "step": 2100
    },
    {
      "epoch": 0.5607953097119551,
      "grad_norm": 158.99224853515625,
      "learning_rate": 8.67861955336566e-06,
      "loss": 50.311,
      "step": 2200
    },
    {
      "epoch": 0.5862860056079531,
      "grad_norm": 463.69635009765625,
      "learning_rate": 8.535533905932739e-06,
      "loss": 48.126,
      "step": 2300
    },
    {
      "epoch": 0.611776701503951,
      "grad_norm": 346.60333251953125,
      "learning_rate": 8.386407858128707e-06,
      "loss": 51.602,
      "step": 2400
    },
    {
      "epoch": 0.637267397399949,
      "grad_norm": 430.1127624511719,
      "learning_rate": 8.231496189304704e-06,
      "loss": 50.3868,
      "step": 2500
    },
    {
      "epoch": 0.6627580932959469,
      "grad_norm": 430.7168884277344,
      "learning_rate": 8.071063563448341e-06,
      "loss": 49.5458,
      "step": 2600
    },
    {
      "epoch": 0.688248789191945,
      "grad_norm": 409.49114990234375,
      "learning_rate": 7.905384077009693e-06,
      "loss": 51.5311,
      "step": 2700
    },
    {
      "epoch": 0.7137394850879429,
      "grad_norm": 443.4747619628906,
      "learning_rate": 7.734740790612137e-06,
      "loss": 50.3823,
      "step": 2800
    },
    {
      "epoch": 0.7392301809839409,
      "grad_norm": 459.6910705566406,
      "learning_rate": 7.559425245448006e-06,
      "loss": 46.766,
      "step": 2900
    },
    {
      "epoch": 0.7647208768799388,
      "grad_norm": 579.1948852539062,
      "learning_rate": 7.379736965185369e-06,
      "loss": 51.7392,
      "step": 3000
    },
    {
      "epoch": 0.7902115727759368,
      "grad_norm": 329.1972961425781,
      "learning_rate": 7.195982944236853e-06,
      "loss": 51.0259,
      "step": 3100
    },
    {
      "epoch": 0.8157022686719347,
      "grad_norm": 488.2768859863281,
      "learning_rate": 7.008477123264849e-06,
      "loss": 53.4051,
      "step": 3200
    },
    {
      "epoch": 0.8411929645679327,
      "grad_norm": 463.906494140625,
      "learning_rate": 6.817539852819149e-06,
      "loss": 43.2941,
      "step": 3300
    },
    {
      "epoch": 0.8666836604639306,
      "grad_norm": 263.90185546875,
      "learning_rate": 6.6234973460234184e-06,
      "loss": 50.0815,
      "step": 3400
    },
    {
      "epoch": 0.8921743563599286,
      "grad_norm": 186.7496337890625,
      "learning_rate": 6.426681121245527e-06,
      "loss": 49.7881,
      "step": 3500
    },
    {
      "epoch": 0.9176650522559265,
      "grad_norm": 398.3317565917969,
      "learning_rate": 6.227427435703997e-06,
      "loss": 50.899,
      "step": 3600
    },
    {
      "epoch": 0.9431557481519246,
      "grad_norm": 238.25408935546875,
      "learning_rate": 6.026076710978172e-06,
      "loss": 44.1347,
      "step": 3700
    },
    {
      "epoch": 0.9686464440479226,
      "grad_norm": 567.3057861328125,
      "learning_rate": 5.82297295140367e-06,
      "loss": 50.0442,
      "step": 3800
    },
    {
      "epoch": 0.9941371399439205,
      "grad_norm": 226.54409790039062,
      "learning_rate": 5.61846315634674e-06,
      "loss": 41.8183,
      "step": 3900
    },
    {
      "epoch": 1.0,
      "eval_loss": 2458.4501953125,
      "eval_runtime": 28.4173,
      "eval_samples_per_second": 122.707,
      "eval_steps_per_second": 15.343,
      "step": 3923
    },
    {
      "epoch": 1.0196278358399185,
      "grad_norm": 698.6807861328125,
      "learning_rate": 5.412896727361663e-06,
      "loss": 44.033,
      "step": 4000
    },
    {
      "epoch": 1.0451185317359164,
      "grad_norm": 525.1095581054688,
      "learning_rate": 5.206624871244066e-06,
      "loss": 44.6573,
      "step": 4100
    },
    {
      "epoch": 1.0706092276319144,
      "grad_norm": 500.8614501953125,
      "learning_rate": 5e-06,
      "loss": 42.8117,
      "step": 4200
    },
    {
      "epoch": 1.0960999235279123,
      "grad_norm": 487.75689697265625,
      "learning_rate": 4.793375128755934e-06,
      "loss": 43.7467,
      "step": 4300
    },
    {
      "epoch": 1.1215906194239103,
      "grad_norm": 385.0135192871094,
      "learning_rate": 4.587103272638339e-06,
      "loss": 43.3253,
      "step": 4400
    },
    {
      "epoch": 1.1470813153199082,
      "grad_norm": 796.0384521484375,
      "learning_rate": 4.381536843653262e-06,
      "loss": 44.0489,
      "step": 4500
    },
    {
      "epoch": 1.1725720112159062,
      "grad_norm": 428.2261962890625,
      "learning_rate": 4.17702704859633e-06,
      "loss": 48.0535,
      "step": 4600
    },
    {
      "epoch": 1.198062707111904,
      "grad_norm": 385.2317199707031,
      "learning_rate": 3.973923289021829e-06,
      "loss": 43.4284,
      "step": 4700
    },
    {
      "epoch": 1.223553403007902,
      "grad_norm": 159.48098754882812,
      "learning_rate": 3.7725725642960047e-06,
      "loss": 40.4862,
      "step": 4800
    },
    {
      "epoch": 1.2490440989039,
      "grad_norm": 200.54164123535156,
      "learning_rate": 3.573318878754475e-06,
      "loss": 44.7052,
      "step": 4900
    },
    {
      "epoch": 1.274534794799898,
      "grad_norm": 301.8349304199219,
      "learning_rate": 3.3765026539765832e-06,
      "loss": 41.2848,
      "step": 5000
    },
    {
      "epoch": 1.3000254906958961,
      "grad_norm": 335.5228271484375,
      "learning_rate": 3.1824601471808504e-06,
      "loss": 43.5137,
      "step": 5100
    },
    {
      "epoch": 1.3255161865918939,
      "grad_norm": 94.21492767333984,
      "learning_rate": 2.991522876735154e-06,
      "loss": 41.9114,
      "step": 5200
    },
    {
      "epoch": 1.351006882487892,
      "grad_norm": 352.31048583984375,
      "learning_rate": 2.804017055763149e-06,
      "loss": 41.0477,
      "step": 5300
    },
    {
      "epoch": 1.3764975783838898,
      "grad_norm": 228.1314697265625,
      "learning_rate": 2.6202630348146323e-06,
      "loss": 38.6621,
      "step": 5400
    },
    {
      "epoch": 1.401988274279888,
      "grad_norm": 170.14947509765625,
      "learning_rate": 2.4405747545519966e-06,
      "loss": 40.0131,
      "step": 5500
    },
    {
      "epoch": 1.4274789701758859,
      "grad_norm": 166.94281005859375,
      "learning_rate": 2.265259209387867e-06,
      "loss": 42.6173,
      "step": 5600
    },
    {
      "epoch": 1.4529696660718838,
      "grad_norm": 255.24964904785156,
      "learning_rate": 2.094615922990309e-06,
      "loss": 41.4291,
      "step": 5700
    },
    {
      "epoch": 1.4784603619678818,
      "grad_norm": 296.5071716308594,
      "learning_rate": 1.928936436551661e-06,
      "loss": 39.6349,
      "step": 5800
    },
    {
      "epoch": 1.5039510578638797,
      "grad_norm": 227.43650817871094,
      "learning_rate": 1.7685038106952952e-06,
      "loss": 40.9158,
      "step": 5900
    },
    {
      "epoch": 1.5294417537598777,
      "grad_norm": 280.7147216796875,
      "learning_rate": 1.6135921418712959e-06,
      "loss": 45.4827,
      "step": 6000
    },
    {
      "epoch": 1.5549324496558756,
      "grad_norm": 166.27865600585938,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 40.6627,
      "step": 6100
    },
    {
      "epoch": 1.5804231455518736,
      "grad_norm": 389.299560546875,
      "learning_rate": 1.321380446634342e-06,
      "loss": 38.7581,
      "step": 6200
    },
    {
      "epoch": 1.6059138414478715,
      "grad_norm": 170.0509033203125,
      "learning_rate": 1.1845796590009684e-06,
      "loss": 40.3946,
      "step": 6300
    },
    {
      "epoch": 1.6314045373438695,
      "grad_norm": 287.59423828125,
      "learning_rate": 1.0542974530180327e-06,
      "loss": 43.811,
      "step": 6400
    },
    {
      "epoch": 1.6568952332398674,
      "grad_norm": 150.49293518066406,
      "learning_rate": 9.307564136490255e-07,
      "loss": 40.4753,
      "step": 6500
    },
    {
      "epoch": 1.6823859291358654,
      "grad_norm": 264.41253662109375,
      "learning_rate": 8.141676086873574e-07,
      "loss": 43.6258,
      "step": 6600
    },
    {
      "epoch": 1.7078766250318633,
      "grad_norm": 157.13479614257812,
      "learning_rate": 7.047302281505735e-07,
      "loss": 43.768,
      "step": 6700
    },
    {
      "epoch": 1.7333673209278613,
      "grad_norm": 206.51609802246094,
      "learning_rate": 6.026312439675553e-07,
      "loss": 39.8553,
      "step": 6800
    },
    {
      "epoch": 1.7588580168238592,
      "grad_norm": 415.54351806640625,
      "learning_rate": 5.080450905401057e-07,
      "loss": 37.9228,
      "step": 6900
    },
    {
      "epoch": 1.7843487127198574,
      "grad_norm": 189.60276794433594,
      "learning_rate": 4.211333667247125e-07,
      "loss": 41.446,
      "step": 7000
    },
    {
      "epoch": 1.8098394086158551,
      "grad_norm": 612.336181640625,
      "learning_rate": 3.420445597436056e-07,
      "loss": 39.8801,
      "step": 7100
    },
    {
      "epoch": 1.8353301045118533,
      "grad_norm": 338.9154052734375,
      "learning_rate": 2.7091379149682683e-07,
      "loss": 41.9962,
      "step": 7200
    },
    {
      "epoch": 1.860820800407851,
      "grad_norm": 207.06942749023438,
      "learning_rate": 2.0786258770873647e-07,
      "loss": 40.2771,
      "step": 7300
    },
    {
      "epoch": 1.8863114963038492,
      "grad_norm": 585.3238525390625,
      "learning_rate": 1.5299867030334815e-07,
      "loss": 37.347,
      "step": 7400
    },
    {
      "epoch": 1.911802192199847,
      "grad_norm": 224.99607849121094,
      "learning_rate": 1.0641577336322761e-07,
      "loss": 45.2395,
      "step": 7500
    },
    {
      "epoch": 1.937292888095845,
      "grad_norm": 304.6292724609375,
      "learning_rate": 6.819348298638839e-08,
      "loss": 51.0356,
      "step": 7600
    },
    {
      "epoch": 1.9627835839918428,
      "grad_norm": 1007.5321044921875,
      "learning_rate": 3.839710131477492e-08,
      "loss": 44.2402,
      "step": 7700
    },
    {
      "epoch": 1.988274279887841,
      "grad_norm": 436.2712097167969,
      "learning_rate": 1.7077534966650767e-08,
      "loss": 41.5961,
      "step": 7800
    },
    {
      "epoch": 2.0,
      "eval_loss": 2133.657470703125,
      "eval_runtime": 28.5718,
      "eval_samples_per_second": 122.043,
      "eval_steps_per_second": 15.26,
      "step": 7846
    },
    {
      "epoch": 2.0137649757838387,
      "grad_norm": 354.5096740722656,
      "learning_rate": 4.2712080634949024e-09,
      "loss": 41.8052,
      "step": 7900
    },
    {
      "epoch": 2.039255671679837,
      "grad_norm": 220.47647094726562,
      "learning_rate": 0.0,
      "loss": 39.9241,
      "step": 8000
    }
  ],
  "logging_steps": 100,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 2000,
  "total_flos": 0.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}