{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004,
      "grad_norm": 176.61734163359293,
      "learning_rate": 8e-08,
      "loss": 4.3238,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 155.411055284858,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 4.0646,
      "step": 5
    },
    {
      "epoch": 0.04,
      "grad_norm": 133.32923635298718,
      "learning_rate": 8.000000000000001e-07,
      "loss": 3.906,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 80.21052252920832,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 3.4641,
      "step": 15
    },
    {
      "epoch": 0.08,
      "grad_norm": 48.85213712192445,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 2.8464,
      "step": 20
    },
    {
      "epoch": 0.1,
      "grad_norm": 45.776617949620864,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.5666,
      "step": 25
    },
    {
      "epoch": 0.12,
      "grad_norm": 26.73992232818656,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.3056,
      "step": 30
    },
    {
      "epoch": 0.14,
      "grad_norm": 17.729572998739787,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 2.0261,
      "step": 35
    },
    {
      "epoch": 0.16,
      "grad_norm": 15.223894174418577,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.8306,
      "step": 40
    },
    {
      "epoch": 0.18,
      "grad_norm": 17.850711162892512,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.8819,
      "step": 45
    },
    {
      "epoch": 0.2,
      "grad_norm": 23.333457209821216,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.7672,
      "step": 50
    },
    {
      "epoch": 0.22,
      "grad_norm": 15.269663674152135,
      "learning_rate": 4.4e-06,
      "loss": 1.6371,
      "step": 55
    },
    {
      "epoch": 0.24,
      "grad_norm": 17.5332417329875,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.561,
      "step": 60
    },
    {
      "epoch": 0.26,
      "grad_norm": 17.643721758122435,
      "learning_rate": 5.2e-06,
      "loss": 1.5423,
      "step": 65
    },
    {
      "epoch": 0.28,
      "grad_norm": 11.734090209997497,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.5258,
      "step": 70
    },
    {
      "epoch": 0.3,
      "grad_norm": 12.798342916618239,
      "learning_rate": 6e-06,
      "loss": 1.4755,
      "step": 75
    },
    {
      "epoch": 0.32,
      "grad_norm": 10.898202349824153,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 1.5182,
      "step": 80
    },
    {
      "epoch": 0.34,
      "grad_norm": 9.088500598331303,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.4451,
      "step": 85
    },
    {
      "epoch": 0.36,
      "grad_norm": 10.983489966478087,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 1.4034,
      "step": 90
    },
    {
      "epoch": 0.38,
      "grad_norm": 10.83379202012167,
      "learning_rate": 7.600000000000001e-06,
      "loss": 1.425,
      "step": 95
    },
    {
      "epoch": 0.4,
      "grad_norm": 8.672594081659222,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.3219,
      "step": 100
    },
    {
      "epoch": 0.42,
      "grad_norm": 9.23377777855437,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.3799,
      "step": 105
    },
    {
      "epoch": 0.44,
      "grad_norm": 9.166396489821778,
      "learning_rate": 8.8e-06,
      "loss": 1.4142,
      "step": 110
    },
    {
      "epoch": 0.46,
      "grad_norm": 7.0660631261512545,
      "learning_rate": 9.200000000000002e-06,
      "loss": 1.417,
      "step": 115
    },
    {
      "epoch": 0.48,
      "grad_norm": 8.099995733491154,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.4513,
      "step": 120
    },
    {
      "epoch": 0.5,
      "grad_norm": 7.288660952017101,
      "learning_rate": 1e-05,
      "loss": 1.3968,
      "step": 125
    },
    {
      "epoch": 0.52,
      "grad_norm": 11.534085489012515,
      "learning_rate": 9.999512620046523e-06,
      "loss": 1.3915,
      "step": 130
    },
    {
      "epoch": 0.54,
      "grad_norm": 6.643837868783607,
      "learning_rate": 9.998050575201772e-06,
      "loss": 1.4451,
      "step": 135
    },
    {
      "epoch": 0.56,
      "grad_norm": 8.528949559944818,
      "learning_rate": 9.995614150494293e-06,
      "loss": 1.3693,
      "step": 140
    },
    {
      "epoch": 0.58,
      "grad_norm": 8.161397264280835,
      "learning_rate": 9.992203820909906e-06,
      "loss": 1.4197,
      "step": 145
    },
    {
      "epoch": 0.6,
      "grad_norm": 7.882117733860753,
      "learning_rate": 9.987820251299121e-06,
      "loss": 1.3828,
      "step": 150
    },
    {
      "epoch": 0.62,
      "grad_norm": 7.0280065330885995,
      "learning_rate": 9.982464296247523e-06,
      "loss": 1.4357,
      "step": 155
    },
    {
      "epoch": 0.64,
      "grad_norm": 12.464091827730066,
      "learning_rate": 9.976136999909156e-06,
      "loss": 1.4414,
      "step": 160
    },
    {
      "epoch": 0.66,
      "grad_norm": 6.4662291984046405,
      "learning_rate": 9.968839595802982e-06,
      "loss": 1.3905,
      "step": 165
    },
    {
      "epoch": 0.68,
      "grad_norm": 6.65402151180169,
      "learning_rate": 9.960573506572391e-06,
      "loss": 1.4024,
      "step": 170
    },
    {
      "epoch": 0.7,
      "grad_norm": 6.113615695740095,
      "learning_rate": 9.951340343707852e-06,
      "loss": 1.4059,
      "step": 175
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.665800966603425,
      "learning_rate": 9.941141907232766e-06,
      "loss": 1.331,
      "step": 180
    },
    {
      "epoch": 0.74,
      "grad_norm": 6.685187563713334,
      "learning_rate": 9.929980185352525e-06,
      "loss": 1.3893,
      "step": 185
    },
    {
      "epoch": 0.76,
      "grad_norm": 7.486719506086368,
      "learning_rate": 9.91785735406693e-06,
      "loss": 1.4275,
      "step": 190
    },
    {
      "epoch": 0.78,
      "grad_norm": 5.888855898761242,
      "learning_rate": 9.904775776745959e-06,
      "loss": 1.4439,
      "step": 195
    },
    {
      "epoch": 0.8,
      "grad_norm": 7.140808325295947,
      "learning_rate": 9.890738003669029e-06,
      "loss": 1.3542,
      "step": 200
    },
    {
      "epoch": 0.82,
      "grad_norm": 6.336292620943448,
      "learning_rate": 9.875746771527817e-06,
      "loss": 1.413,
      "step": 205
    },
    {
      "epoch": 0.84,
      "grad_norm": 7.079906024172797,
      "learning_rate": 9.859805002892733e-06,
      "loss": 1.3682,
      "step": 210
    },
    {
      "epoch": 0.86,
      "grad_norm": 6.092306466503896,
      "learning_rate": 9.842915805643156e-06,
      "loss": 1.3372,
      "step": 215
    },
    {
      "epoch": 0.88,
      "grad_norm": 7.149575331392461,
      "learning_rate": 9.825082472361558e-06,
      "loss": 1.3118,
      "step": 220
    },
    {
      "epoch": 0.9,
      "grad_norm": 7.092971166660636,
      "learning_rate": 9.806308479691595e-06,
      "loss": 1.408,
      "step": 225
    },
    {
      "epoch": 0.92,
      "grad_norm": 5.984139809585789,
      "learning_rate": 9.786597487660336e-06,
      "loss": 1.366,
      "step": 230
    },
    {
      "epoch": 0.94,
      "grad_norm": 5.706031191925562,
      "learning_rate": 9.765953338964736e-06,
      "loss": 1.3484,
      "step": 235
    },
    {
      "epoch": 0.96,
      "grad_norm": 6.2002602935958455,
      "learning_rate": 9.744380058222483e-06,
      "loss": 1.4072,
      "step": 240
    },
    {
      "epoch": 0.98,
      "grad_norm": 5.8664026992140865,
      "learning_rate": 9.721881851187406e-06,
      "loss": 1.3566,
      "step": 245
    },
    {
      "epoch": 1.0,
      "grad_norm": 5.9527848362443825,
      "learning_rate": 9.698463103929542e-06,
      "loss": 1.3774,
      "step": 250
    },
    {
      "epoch": 1.02,
      "grad_norm": 6.374596828831512,
      "learning_rate": 9.674128381980073e-06,
      "loss": 0.9693,
      "step": 255
    },
    {
      "epoch": 1.04,
      "grad_norm": 7.193932199691029,
      "learning_rate": 9.648882429441258e-06,
      "loss": 0.8766,
      "step": 260
    },
    {
      "epoch": 1.06,
      "grad_norm": 6.524026562136591,
      "learning_rate": 9.622730168061568e-06,
      "loss": 0.9104,
      "step": 265
    },
    {
      "epoch": 1.08,
      "grad_norm": 7.301263634828025,
      "learning_rate": 9.595676696276173e-06,
      "loss": 0.9334,
      "step": 270
    },
    {
      "epoch": 1.1,
      "grad_norm": 5.670753086010248,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.8783,
      "step": 275
    },
    {
      "epoch": 1.12,
      "grad_norm": 6.3479832974747525,
      "learning_rate": 9.538887392664544e-06,
      "loss": 0.9225,
      "step": 280
    },
    {
      "epoch": 1.1400000000000001,
      "grad_norm": 5.625595616634227,
      "learning_rate": 9.50916263202557e-06,
      "loss": 0.9008,
      "step": 285
    },
    {
      "epoch": 1.16,
      "grad_norm": 6.053431365139577,
      "learning_rate": 9.478558801197065e-06,
      "loss": 0.8751,
      "step": 290
    },
    {
      "epoch": 1.18,
      "grad_norm": 6.445287713916751,
      "learning_rate": 9.44708186645649e-06,
      "loss": 0.8346,
      "step": 295
    },
    {
      "epoch": 1.2,
      "grad_norm": 5.960574296608363,
      "learning_rate": 9.414737964294636e-06,
      "loss": 0.848,
      "step": 300
    },
    {
      "epoch": 1.22,
      "grad_norm": 6.199669817245069,
      "learning_rate": 9.381533400219319e-06,
      "loss": 0.9026,
      "step": 305
    },
    {
      "epoch": 1.24,
      "grad_norm": 6.03202099539365,
      "learning_rate": 9.347474647526095e-06,
      "loss": 0.9109,
      "step": 310
    },
    {
      "epoch": 1.26,
      "grad_norm": 5.808928571078848,
      "learning_rate": 9.312568346036288e-06,
      "loss": 0.8967,
      "step": 315
    },
    {
      "epoch": 1.28,
      "grad_norm": 5.49851518421863,
      "learning_rate": 9.276821300802535e-06,
      "loss": 0.8444,
      "step": 320
    },
    {
      "epoch": 1.3,
      "grad_norm": 5.565049800849722,
      "learning_rate": 9.24024048078213e-06,
      "loss": 0.9639,
      "step": 325
    },
    {
      "epoch": 1.32,
      "grad_norm": 5.562593169506186,
      "learning_rate": 9.202833017478421e-06,
      "loss": 0.8689,
      "step": 330
    },
    {
      "epoch": 1.34,
      "grad_norm": 6.411205035883853,
      "learning_rate": 9.164606203550498e-06,
      "loss": 0.8717,
      "step": 335
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 5.773518519345432,
      "learning_rate": 9.125567491391476e-06,
      "loss": 0.8878,
      "step": 340
    },
    {
      "epoch": 1.38,
      "grad_norm": 6.106878743329819,
      "learning_rate": 9.085724491675642e-06,
      "loss": 0.8751,
      "step": 345
    },
    {
      "epoch": 1.4,
      "grad_norm": 5.602954058571908,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.8821,
      "step": 350
    },
    {
      "epoch": 1.42,
      "grad_norm": 5.241008563893136,
      "learning_rate": 9.003656854743667e-06,
      "loss": 0.9555,
      "step": 355
    },
    {
      "epoch": 1.44,
      "grad_norm": 5.977919483177927,
      "learning_rate": 8.961448216775955e-06,
      "loss": 0.8809,
      "step": 360
    },
    {
      "epoch": 1.46,
      "grad_norm": 5.666258321453365,
      "learning_rate": 8.9184672866292e-06,
      "loss": 0.8557,
      "step": 365
    },
    {
      "epoch": 1.48,
      "grad_norm": 7.0549468534641235,
      "learning_rate": 8.874722443520898e-06,
      "loss": 0.9716,
      "step": 370
    },
    {
      "epoch": 1.5,
      "grad_norm": 5.706625987649317,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.8705,
      "step": 375
    },
    {
      "epoch": 1.52,
      "grad_norm": 6.104256802083156,
      "learning_rate": 8.784975278258783e-06,
      "loss": 0.8187,
      "step": 380
    },
    {
      "epoch": 1.54,
      "grad_norm": 6.5871280297893415,
      "learning_rate": 8.73899045249266e-06,
      "loss": 0.9132,
      "step": 385
    },
    {
      "epoch": 1.56,
      "grad_norm": 5.57782141266722,
      "learning_rate": 8.692276703129421e-06,
      "loss": 0.9218,
      "step": 390
    },
    {
      "epoch": 1.58,
      "grad_norm": 5.472134179814514,
      "learning_rate": 8.644843137107058e-06,
      "loss": 0.8681,
      "step": 395
    },
    {
      "epoch": 1.6,
      "grad_norm": 6.397480891094298,
      "learning_rate": 8.596699001693257e-06,
      "loss": 0.9391,
      "step": 400
    },
    {
      "epoch": 1.62,
      "grad_norm": 5.363456755073,
      "learning_rate": 8.547853682682605e-06,
      "loss": 0.8772,
      "step": 405
    },
    {
      "epoch": 1.6400000000000001,
      "grad_norm": 5.259511329956283,
      "learning_rate": 8.498316702566828e-06,
      "loss": 0.8644,
      "step": 410
    },
    {
      "epoch": 1.6600000000000001,
      "grad_norm": 6.1682566237448775,
      "learning_rate": 8.44809771867835e-06,
      "loss": 0.947,
      "step": 415
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 5.7859091344204145,
      "learning_rate": 8.397206521307584e-06,
      "loss": 0.9254,
      "step": 420
    },
    {
      "epoch": 1.7,
      "grad_norm": 6.057814591654113,
      "learning_rate": 8.345653031794292e-06,
      "loss": 0.9062,
      "step": 425
    },
    {
      "epoch": 1.72,
      "grad_norm": 5.465194913584291,
      "learning_rate": 8.293447300593402e-06,
      "loss": 0.9132,
      "step": 430
    },
    {
      "epoch": 1.74,
      "grad_norm": 5.608611072004163,
      "learning_rate": 8.240599505315656e-06,
      "loss": 0.8963,
      "step": 435
    },
    {
      "epoch": 1.76,
      "grad_norm": 6.514982052300432,
      "learning_rate": 8.18711994874345e-06,
      "loss": 0.9708,
      "step": 440
    },
    {
      "epoch": 1.78,
      "grad_norm": 5.3243654718837075,
      "learning_rate": 8.133019056822303e-06,
      "loss": 0.8674,
      "step": 445
    },
    {
      "epoch": 1.8,
      "grad_norm": 6.04340107619584,
      "learning_rate": 8.078307376628292e-06,
      "loss": 0.9128,
      "step": 450
    },
    {
      "epoch": 1.8199999999999998,
      "grad_norm": 5.704899222715775,
      "learning_rate": 8.022995574311876e-06,
      "loss": 0.8828,
      "step": 455
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 5.7674701231397805,
      "learning_rate": 7.967094433018508e-06,
      "loss": 0.9295,
      "step": 460
    },
    {
      "epoch": 1.8599999999999999,
      "grad_norm": 5.324848982286405,
      "learning_rate": 7.910614850786448e-06,
      "loss": 0.8745,
      "step": 465
    },
    {
      "epoch": 1.88,
      "grad_norm": 6.686861233685566,
      "learning_rate": 7.85356783842216e-06,
      "loss": 0.9646,
      "step": 470
    },
    {
      "epoch": 1.9,
      "grad_norm": 6.121104493662131,
      "learning_rate": 7.795964517353734e-06,
      "loss": 0.933,
      "step": 475
    },
    {
      "epoch": 1.92,
      "grad_norm": 5.582536228375127,
      "learning_rate": 7.737816117462752e-06,
      "loss": 0.9262,
      "step": 480
    },
    {
      "epoch": 1.94,
      "grad_norm": 6.077848142278746,
      "learning_rate": 7.679133974894984e-06,
      "loss": 0.9029,
      "step": 485
    },
    {
      "epoch": 1.96,
      "grad_norm": 5.986835723576084,
      "learning_rate": 7.619929529850397e-06,
      "loss": 0.9204,
      "step": 490
    },
    {
      "epoch": 1.98,
      "grad_norm": 4.648465110523929,
      "learning_rate": 7.560214324352858e-06,
      "loss": 0.855,
      "step": 495
    },
    {
      "epoch": 2.0,
      "grad_norm": 6.394027835473984,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9621,
      "step": 500
    },
    {
      "epoch": 2.02,
      "grad_norm": 3.89451985928487,
      "learning_rate": 7.4392982956936644e-06,
      "loss": 0.4544,
      "step": 505
    },
    {
      "epoch": 2.04,
      "grad_norm": 6.182703543573361,
      "learning_rate": 7.378121045351378e-06,
      "loss": 0.4096,
      "step": 510
    },
    {
      "epoch": 2.06,
      "grad_norm": 6.193574779151475,
      "learning_rate": 7.31648017559931e-06,
      "loss": 0.4193,
      "step": 515
    },
    {
      "epoch": 2.08,
      "grad_norm": 4.8681830081803605,
      "learning_rate": 7.254387703447154e-06,
      "loss": 0.4205,
      "step": 520
    },
    {
      "epoch": 2.1,
      "grad_norm": 5.210546829726289,
      "learning_rate": 7.191855733945388e-06,
      "loss": 0.4374,
      "step": 525
    },
    {
      "epoch": 2.12,
      "grad_norm": 4.646814925775646,
      "learning_rate": 7.128896457825364e-06,
      "loss": 0.4225,
      "step": 530
    },
    {
      "epoch": 2.14,
      "grad_norm": 4.654154374920671,
      "learning_rate": 7.06552214912271e-06,
      "loss": 0.4276,
      "step": 535
    },
    {
      "epoch": 2.16,
      "grad_norm": 5.3434847628458275,
      "learning_rate": 7.0017451627844765e-06,
      "loss": 0.432,
      "step": 540
    },
    {
      "epoch": 2.18,
      "grad_norm": 4.709540680953104,
      "learning_rate": 6.9375779322605154e-06,
      "loss": 0.4378,
      "step": 545
    },
    {
      "epoch": 2.2,
      "grad_norm": 4.834968890809526,
      "learning_rate": 6.873032967079562e-06,
      "loss": 0.4405,
      "step": 550
    },
    {
      "epoch": 2.22,
      "grad_norm": 4.21487141814997,
      "learning_rate": 6.808122850410461e-06,
      "loss": 0.4322,
      "step": 555
    },
    {
      "epoch": 2.24,
      "grad_norm": 5.761355844826787,
      "learning_rate": 6.7428602366090764e-06,
      "loss": 0.4207,
      "step": 560
    },
    {
      "epoch": 2.26,
      "grad_norm": 4.752918487437509,
      "learning_rate": 6.677257848751276e-06,
      "loss": 0.4514,
      "step": 565
    },
    {
      "epoch": 2.2800000000000002,
      "grad_norm": 6.0722053004977585,
      "learning_rate": 6.611328476152557e-06,
      "loss": 0.4447,
      "step": 570
    },
    {
      "epoch": 2.3,
      "grad_norm": 3.9317893606382217,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.447,
      "step": 575
    },
    {
      "epoch": 2.32,
      "grad_norm": 4.6089986065133,
      "learning_rate": 6.4785402502202345e-06,
      "loss": 0.4267,
      "step": 580
    },
    {
      "epoch": 2.34,
      "grad_norm": 5.5318341884342646,
      "learning_rate": 6.411707284214384e-06,
      "loss": 0.4495,
      "step": 585
    },
    {
      "epoch": 2.36,
      "grad_norm": 6.3158633504097965,
      "learning_rate": 6.344599103076329e-06,
      "loss": 0.4502,
      "step": 590
    },
    {
      "epoch": 2.38,
      "grad_norm": 4.522299833445048,
      "learning_rate": 6.277228789678953e-06,
      "loss": 0.4408,
      "step": 595
    },
    {
      "epoch": 2.4,
      "grad_norm": 4.75003515089972,
      "learning_rate": 6.209609477998339e-06,
      "loss": 0.424,
      "step": 600
    },
    {
      "epoch": 2.42,
      "grad_norm": 4.849076130585631,
      "learning_rate": 6.141754350553279e-06,
      "loss": 0.4418,
      "step": 605
    },
    {
      "epoch": 2.44,
      "grad_norm": 4.754914150256052,
      "learning_rate": 6.073676635835317e-06,
      "loss": 0.4379,
      "step": 610
    },
    {
      "epoch": 2.46,
      "grad_norm": 5.217922728452035,
      "learning_rate": 6.005389605729824e-06,
      "loss": 0.4223,
      "step": 615
    },
    {
      "epoch": 2.48,
      "grad_norm": 5.09656877734741,
      "learning_rate": 5.936906572928625e-06,
      "loss": 0.4345,
      "step": 620
    },
    {
      "epoch": 2.5,
      "grad_norm": 4.94943409663634,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.4555,
      "step": 625
    },
    {
      "epoch": 2.52,
      "grad_norm": 5.242233985299832,
      "learning_rate": 5.799405938459175e-06,
      "loss": 0.4513,
      "step": 630
    },
    {
      "epoch": 2.54,
      "grad_norm": 4.4422684985992715,
      "learning_rate": 5.730415142812059e-06,
      "loss": 0.4288,
      "step": 635
    },
    {
      "epoch": 2.56,
      "grad_norm": 3.912114397025565,
      "learning_rate": 5.661281951285613e-06,
      "loss": 0.4311,
      "step": 640
    },
    {
      "epoch": 2.58,
      "grad_norm": 5.222721860673294,
      "learning_rate": 5.592019841532507e-06,
      "loss": 0.4648,
      "step": 645
    },
    {
      "epoch": 2.6,
      "grad_norm": 4.724149942564251,
      "learning_rate": 5.522642316338268e-06,
      "loss": 0.4277,
      "step": 650
    },
    {
      "epoch": 2.62,
      "grad_norm": 4.491400560249321,
      "learning_rate": 5.453162900988902e-06,
      "loss": 0.4369,
      "step": 655
    },
    {
      "epoch": 2.64,
      "grad_norm": 4.8746449468292585,
      "learning_rate": 5.383595140634093e-06,
      "loss": 0.4079,
      "step": 660
    },
    {
      "epoch": 2.66,
      "grad_norm": 3.82569667633887,
      "learning_rate": 5.3139525976465675e-06,
      "loss": 0.4441,
      "step": 665
    },
    {
      "epoch": 2.68,
      "grad_norm": 4.346847846602506,
      "learning_rate": 5.244248848978067e-06,
      "loss": 0.45,
      "step": 670
    },
    {
      "epoch": 2.7,
      "grad_norm": 4.388123844206694,
      "learning_rate": 5.174497483512506e-06,
      "loss": 0.4226,
      "step": 675
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 4.9729250460950185,
      "learning_rate": 5.1047120994167855e-06,
      "loss": 0.4362,
      "step": 680
    },
    {
      "epoch": 2.74,
      "grad_norm": 4.062911661829956,
      "learning_rate": 5.034906301489808e-06,
      "loss": 0.421,
      "step": 685
    },
    {
      "epoch": 2.76,
      "grad_norm": 3.985815433151041,
      "learning_rate": 4.965093698510192e-06,
      "loss": 0.4436,
      "step": 690
    },
    {
      "epoch": 2.7800000000000002,
      "grad_norm": 5.323464036561023,
      "learning_rate": 4.895287900583216e-06,
      "loss": 0.4503,
      "step": 695
    },
    {
      "epoch": 2.8,
      "grad_norm": 5.374509098625144,
      "learning_rate": 4.825502516487497e-06,
      "loss": 0.4453,
      "step": 700
    },
    {
      "epoch": 2.82,
      "grad_norm": 4.590575855627389,
      "learning_rate": 4.755751151021934e-06,
      "loss": 0.4284,
      "step": 705
    },
    {
      "epoch": 2.84,
      "grad_norm": 4.509037906020891,
      "learning_rate": 4.686047402353433e-06,
      "loss": 0.4553,
      "step": 710
    },
    {
      "epoch": 2.86,
      "grad_norm": 4.565251181592848,
      "learning_rate": 4.6164048593659076e-06,
      "loss": 0.4313,
      "step": 715
    },
    {
      "epoch": 2.88,
      "grad_norm": 4.2913762971120155,
      "learning_rate": 4.546837099011101e-06,
      "loss": 0.4071,
      "step": 720
    },
    {
      "epoch": 2.9,
      "grad_norm": 5.0125093823770905,
      "learning_rate": 4.477357683661734e-06,
      "loss": 0.4263,
      "step": 725
    },
    {
      "epoch": 2.92,
      "grad_norm": 5.618873097905976,
      "learning_rate": 4.4079801584674955e-06,
      "loss": 0.4501,
      "step": 730
    },
    {
      "epoch": 2.94,
      "grad_norm": 6.075104024673319,
      "learning_rate": 4.3387180487143875e-06,
      "loss": 0.4389,
      "step": 735
    },
    {
      "epoch": 2.96,
      "grad_norm": 4.00631519669371,
      "learning_rate": 4.269584857187942e-06,
      "loss": 0.4373,
      "step": 740
    },
    {
      "epoch": 2.98,
      "grad_norm": 4.705162020934786,
      "learning_rate": 4.200594061540827e-06,
      "loss": 0.4389,
      "step": 745
    },
    {
      "epoch": 3.0,
      "grad_norm": 5.44214376188593,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.3925,
      "step": 750
    },
    {
      "epoch": 3.02,
      "grad_norm": 2.7924019809148586,
      "learning_rate": 4.063093427071376e-06,
      "loss": 0.2656,
      "step": 755
    },
    {
      "epoch": 3.04,
      "grad_norm": 3.104774247282136,
      "learning_rate": 3.994610394270178e-06,
      "loss": 0.2405,
      "step": 760
    },
    {
      "epoch": 3.06,
      "grad_norm": 3.343609631464514,
      "learning_rate": 3.926323364164684e-06,
      "loss": 0.2467,
      "step": 765
    },
    {
      "epoch": 3.08,
      "grad_norm": 3.419736245359143,
      "learning_rate": 3.8582456494467214e-06,
      "loss": 0.2723,
      "step": 770
    },
    {
      "epoch": 3.1,
      "grad_norm": 3.8565429188392186,
      "learning_rate": 3.790390522001662e-06,
      "loss": 0.2657,
      "step": 775
    },
    {
      "epoch": 3.12,
      "grad_norm": 4.254486576860473,
      "learning_rate": 3.7227712103210485e-06,
      "loss": 0.2665,
      "step": 780
    },
    {
      "epoch": 3.14,
      "grad_norm": 3.665505016144709,
      "learning_rate": 3.655400896923672e-06,
      "loss": 0.2622,
      "step": 785
    },
    {
      "epoch": 3.16,
      "grad_norm": 2.8301720206592416,
      "learning_rate": 3.5882927157856175e-06,
      "loss": 0.2619,
      "step": 790
    },
    {
      "epoch": 3.18,
      "grad_norm": 3.693549461824111,
      "learning_rate": 3.521459749779769e-06,
      "loss": 0.2512,
      "step": 795
    },
    {
      "epoch": 3.2,
      "grad_norm": 3.5206986483032723,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.2526,
      "step": 800
    },
    {
      "epoch": 3.22,
      "grad_norm": 3.3748884588505828,
      "learning_rate": 3.3886715238474454e-06,
      "loss": 0.2602,
      "step": 805
    },
    {
      "epoch": 3.24,
      "grad_norm": 2.967732786836979,
      "learning_rate": 3.322742151248726e-06,
      "loss": 0.2606,
      "step": 810
    },
    {
      "epoch": 3.26,
      "grad_norm": 4.073155951036809,
      "learning_rate": 3.2571397633909252e-06,
      "loss": 0.2602,
      "step": 815
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 3.1633808640117236,
      "learning_rate": 3.1918771495895395e-06,
      "loss": 0.2455,
      "step": 820
    },
    {
      "epoch": 3.3,
      "grad_norm": 3.1430107416422763,
      "learning_rate": 3.12696703292044e-06,
      "loss": 0.2532,
      "step": 825
    },
    {
      "epoch": 3.32,
      "grad_norm": 4.278015207900964,
      "learning_rate": 3.0624220677394854e-06,
      "loss": 0.2491,
      "step": 830
    },
    {
      "epoch": 3.34,
      "grad_norm": 4.508283169888662,
      "learning_rate": 2.9982548372155264e-06,
      "loss": 0.2698,
      "step": 835
    },
    {
      "epoch": 3.36,
      "grad_norm": 3.3985357213270353,
      "learning_rate": 2.934477850877292e-06,
      "loss": 0.271,
      "step": 840
    },
    {
      "epoch": 3.38,
      "grad_norm": 4.3177739641475625,
      "learning_rate": 2.871103542174637e-06,
      "loss": 0.2723,
      "step": 845
    },
    {
      "epoch": 3.4,
      "grad_norm": 3.6904622227059964,
      "learning_rate": 2.8081442660546126e-06,
      "loss": 0.2593,
      "step": 850
    },
    {
      "epoch": 3.42,
      "grad_norm": 2.9293826019569074,
      "learning_rate": 2.7456122965528475e-06,
      "loss": 0.2617,
      "step": 855
    },
    {
      "epoch": 3.44,
      "grad_norm": 3.290303151322462,
      "learning_rate": 2.683519824400693e-06,
      "loss": 0.2582,
      "step": 860
    },
    {
      "epoch": 3.46,
      "grad_norm": 3.2377051712154716,
      "learning_rate": 2.6218789546486235e-06,
      "loss": 0.262,
      "step": 865
    },
    {
      "epoch": 3.48,
      "grad_norm": 3.968934702000483,
      "learning_rate": 2.560701704306336e-06,
      "loss": 0.2527,
      "step": 870
    },
    {
      "epoch": 3.5,
      "grad_norm": 2.814982968549524,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.2547,
      "step": 875
    },
    {
      "epoch": 3.52,
      "grad_norm": 2.8738241230711576,
      "learning_rate": 2.4397856756471435e-06,
      "loss": 0.2584,
      "step": 880
    },
    {
      "epoch": 3.54,
      "grad_norm": 3.2477024834709396,
      "learning_rate": 2.380070470149605e-06,
      "loss": 0.2514,
      "step": 885
    },
    {
      "epoch": 3.56,
      "grad_norm": 4.0745774880578844,
      "learning_rate": 2.320866025105016e-06,
      "loss": 0.266,
      "step": 890
    },
    {
      "epoch": 3.58,
      "grad_norm": 3.440405002018834,
      "learning_rate": 2.2621838825372496e-06,
      "loss": 0.2637,
      "step": 895
    },
    {
      "epoch": 3.6,
      "grad_norm": 3.3065115492957933,
      "learning_rate": 2.204035482646267e-06,
      "loss": 0.2589,
      "step": 900
    },
    {
      "epoch": 3.62,
      "grad_norm": 2.495985385885236,
      "learning_rate": 2.146432161577842e-06,
      "loss": 0.2552,
      "step": 905
    },
    {
      "epoch": 3.64,
      "grad_norm": 2.9350609154945473,
      "learning_rate": 2.0893851492135536e-06,
      "loss": 0.2549,
      "step": 910
    },
    {
      "epoch": 3.66,
      "grad_norm": 3.6046449037840222,
      "learning_rate": 2.0329055669814936e-06,
      "loss": 0.2591,
      "step": 915
    },
    {
      "epoch": 3.68,
      "grad_norm": 3.5165798523906577,
      "learning_rate": 1.977004425688126e-06,
      "loss": 0.2616,
      "step": 920
    },
    {
      "epoch": 3.7,
      "grad_norm": 3.126960853099222,
      "learning_rate": 1.9216926233717087e-06,
      "loss": 0.2582,
      "step": 925
    },
    {
      "epoch": 3.7199999999999998,
      "grad_norm": 3.0303889926026693,
      "learning_rate": 1.8669809431776991e-06,
      "loss": 0.2414,
      "step": 930
    },
    {
      "epoch": 3.74,
      "grad_norm": 3.4976844042391506,
      "learning_rate": 1.8128800512565514e-06,
      "loss": 0.2654,
      "step": 935
    },
    {
      "epoch": 3.76,
      "grad_norm": 4.624711265706133,
      "learning_rate": 1.7594004946843458e-06,
      "loss": 0.2601,
      "step": 940
    },
    {
      "epoch": 3.7800000000000002,
      "grad_norm": 3.1163969659292547,
      "learning_rate": 1.7065526994065973e-06,
      "loss": 0.2635,
      "step": 945
    },
    {
      "epoch": 3.8,
      "grad_norm": 4.134091674876607,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 0.2613,
      "step": 950
    },
    {
      "epoch": 3.82,
      "grad_norm": 4.520449029040607,
      "learning_rate": 1.6027934786924187e-06,
      "loss": 0.2659,
      "step": 955
    },
    {
      "epoch": 3.84,
      "grad_norm": 3.340691260013119,
      "learning_rate": 1.551902281321651e-06,
      "loss": 0.263,
      "step": 960
    },
    {
      "epoch": 3.86,
      "grad_norm": 3.3324205347544362,
      "learning_rate": 1.5016832974331725e-06,
      "loss": 0.2674,
      "step": 965
    },
    {
      "epoch": 3.88,
      "grad_norm": 3.1997988068352248,
      "learning_rate": 1.4521463173173966e-06,
      "loss": 0.25,
      "step": 970
    },
    {
      "epoch": 3.9,
      "grad_norm": 2.4626614462088248,
      "learning_rate": 1.4033009983067454e-06,
      "loss": 0.2622,
      "step": 975
    },
    {
      "epoch": 3.92,
      "grad_norm": 2.5755921562893342,
      "learning_rate": 1.3551568628929434e-06,
      "loss": 0.246,
      "step": 980
    },
    {
      "epoch": 3.94,
      "grad_norm": 2.9337304572329717,
      "learning_rate": 1.3077232968705805e-06,
      "loss": 0.2403,
      "step": 985
    },
    {
      "epoch": 3.96,
      "grad_norm": 3.6309216538671354,
      "learning_rate": 1.2610095475073415e-06,
      "loss": 0.2708,
      "step": 990
    },
    {
      "epoch": 3.98,
      "grad_norm": 2.6536626242412056,
      "learning_rate": 1.2150247217412186e-06,
      "loss": 0.2495,
      "step": 995
    },
    {
      "epoch": 4.0,
      "grad_norm": 3.8919822758192404,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.2763,
      "step": 1000
    },
    {
      "epoch": 4.02,
      "grad_norm": 2.6296022264068797,
      "learning_rate": 1.1252775564791023e-06,
      "loss": 0.2009,
      "step": 1005
    },
    {
      "epoch": 4.04,
      "grad_norm": 2.0687616816587715,
      "learning_rate": 1.0815327133708015e-06,
      "loss": 0.1978,
      "step": 1010
    },
    {
      "epoch": 4.06,
      "grad_norm": 2.036715506169882,
      "learning_rate": 1.0385517832240472e-06,
      "loss": 0.195,
      "step": 1015
    },
    {
      "epoch": 4.08,
      "grad_norm": 2.1444679630444647,
      "learning_rate": 9.963431452563331e-07,
      "loss": 0.195,
      "step": 1020
    },
    {
      "epoch": 4.1,
      "grad_norm": 2.3208387437528306,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.2003,
      "step": 1025
    },
    {
      "epoch": 4.12,
      "grad_norm": 2.6475787949798635,
      "learning_rate": 9.142755083243577e-07,
      "loss": 0.2012,
      "step": 1030
    },
    {
      "epoch": 4.14,
      "grad_norm": 2.0747944566043417,
      "learning_rate": 8.744325086085248e-07,
      "loss": 0.1839,
      "step": 1035
    },
    {
      "epoch": 4.16,
      "grad_norm": 2.5790204025275174,
      "learning_rate": 8.353937964495029e-07,
      "loss": 0.2078,
      "step": 1040
    },
    {
      "epoch": 4.18,
      "grad_norm": 2.279012511901917,
      "learning_rate": 7.971669825215789e-07,
      "loss": 0.2033,
      "step": 1045
    },
    {
      "epoch": 4.2,
      "grad_norm": 2.263082794933402,
      "learning_rate": 7.597595192178702e-07,
      "loss": 0.2013,
      "step": 1050
    },
    {
      "epoch": 4.22,
      "grad_norm": 1.746174251156355,
      "learning_rate": 7.23178699197467e-07,
      "loss": 0.1942,
      "step": 1055
    },
    {
      "epoch": 4.24,
      "grad_norm": 1.8889622200380844,
      "learning_rate": 6.874316539637127e-07,
      "loss": 0.192,
      "step": 1060
    },
    {
      "epoch": 4.26,
      "grad_norm": 1.9542919003413253,
      "learning_rate": 6.52525352473905e-07,
      "loss": 0.2073,
      "step": 1065
    },
    {
      "epoch": 4.28,
      "grad_norm": 2.110295600169004,
      "learning_rate": 6.184665997806832e-07,
      "loss": 0.1926,
      "step": 1070
    },
    {
      "epoch": 4.3,
      "grad_norm": 2.2234924989126563,
      "learning_rate": 5.852620357053651e-07,
      "loss": 0.2033,
      "step": 1075
    },
    {
      "epoch": 4.32,
      "grad_norm": 2.6413829746557504,
      "learning_rate": 5.529181335435124e-07,
      "loss": 0.198,
      "step": 1080
    },
    {
      "epoch": 4.34,
      "grad_norm": 2.097744663828809,
      "learning_rate": 5.214411988029355e-07,
      "loss": 0.2017,
      "step": 1085
    },
    {
      "epoch": 4.36,
      "grad_norm": 2.3107650980894263,
      "learning_rate": 4.908373679744316e-07,
      "loss": 0.2004,
      "step": 1090
    },
    {
      "epoch": 4.38,
      "grad_norm": 2.0392932247609665,
      "learning_rate": 4.6111260733545714e-07,
      "loss": 0.1978,
      "step": 1095
    },
    {
      "epoch": 4.4,
      "grad_norm": 1.9108697879670782,
      "learning_rate": 4.322727117869951e-07,
      "loss": 0.2064,
      "step": 1100
    },
    {
      "epoch": 4.42,
      "grad_norm": 2.0940301187428645,
      "learning_rate": 4.043233037238281e-07,
      "loss": 0.1992,
      "step": 1105
    },
    {
      "epoch": 4.44,
      "grad_norm": 2.1573212002556192,
      "learning_rate": 3.772698319384349e-07,
      "loss": 0.1978,
      "step": 1110
    },
    {
      "epoch": 4.46,
      "grad_norm": 1.7681339799095577,
      "learning_rate": 3.511175705587433e-07,
      "loss": 0.1949,
      "step": 1115
    },
    {
      "epoch": 4.48,
      "grad_norm": 2.6507223455216335,
      "learning_rate": 3.258716180199278e-07,
      "loss": 0.1974,
      "step": 1120
    },
    {
      "epoch": 4.5,
      "grad_norm": 4.899195247798816,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.1951,
      "step": 1125
    },
    {
      "epoch": 4.52,
      "grad_norm": 2.362208067582404,
      "learning_rate": 2.7811814881259503e-07,
      "loss": 0.1898,
      "step": 1130
    },
    {
      "epoch": 4.54,
      "grad_norm": 1.9828163543826154,
      "learning_rate": 2.556199417775174e-07,
      "loss": 0.2081,
      "step": 1135
    },
    {
      "epoch": 4.5600000000000005,
      "grad_norm": 1.9187249803005326,
      "learning_rate": 2.3404666103526542e-07,
      "loss": 0.2027,
      "step": 1140
    },
    {
      "epoch": 4.58,
      "grad_norm": 2.050797728763808,
      "learning_rate": 2.134025123396638e-07,
      "loss": 0.195,
      "step": 1145
    },
    {
      "epoch": 4.6,
      "grad_norm": 2.434262719508876,
      "learning_rate": 1.9369152030840553e-07,
      "loss": 0.1935,
      "step": 1150
    },
    {
      "epoch": 4.62,
      "grad_norm": 2.7114986108898496,
      "learning_rate": 1.7491752763844294e-07,
      "loss": 0.201,
      "step": 1155
    },
    {
      "epoch": 4.64,
      "grad_norm": 2.342848968749728,
      "learning_rate": 1.5708419435684463e-07,
      "loss": 0.2045,
      "step": 1160
    },
    {
      "epoch": 4.66,
      "grad_norm": 2.50611015184642,
      "learning_rate": 1.4019499710726913e-07,
      "loss": 0.2037,
      "step": 1165
    },
    {
      "epoch": 4.68,
      "grad_norm": 1.9845962796916226,
      "learning_rate": 1.2425322847218368e-07,
      "loss": 0.1902,
      "step": 1170
    },
    {
      "epoch": 4.7,
      "grad_norm": 2.066929322984826,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 0.1956,
      "step": 1175
    },
    {
      "epoch": 4.72,
      "grad_norm": 2.4138648473696853,
      "learning_rate": 9.522422325404234e-08,
      "loss": 0.1985,
      "step": 1180
    },
    {
      "epoch": 4.74,
      "grad_norm": 2.911095557310394,
      "learning_rate": 8.214264593307097e-08,
      "loss": 0.1965,
      "step": 1185
    },
    {
      "epoch": 4.76,
      "grad_norm": 2.030674439873152,
      "learning_rate": 7.001981464747565e-08,
      "loss": 0.1905,
      "step": 1190
    },
    {
      "epoch": 4.78,
      "grad_norm": 2.5046761639631696,
      "learning_rate": 5.8858092767236084e-08,
      "loss": 0.2016,
      "step": 1195
    },
    {
      "epoch": 4.8,
      "grad_norm": 2.6259164045048715,
      "learning_rate": 4.865965629214819e-08,
      "loss": 0.1991,
      "step": 1200
    },
    {
      "epoch": 4.82,
      "grad_norm": 2.5096024708693725,
      "learning_rate": 3.9426493427611177e-08,
      "loss": 0.1987,
      "step": 1205
    },
    {
      "epoch": 4.84,
      "grad_norm": 2.1146883842387783,
      "learning_rate": 3.1160404197018155e-08,
      "loss": 0.1915,
      "step": 1210
    },
    {
      "epoch": 4.86,
      "grad_norm": 2.1798112190278136,
      "learning_rate": 2.386300009084408e-08,
      "loss": 0.2055,
      "step": 1215
    },
    {
      "epoch": 4.88,
      "grad_norm": 2.1283754821360152,
      "learning_rate": 1.753570375247815e-08,
      "loss": 0.1941,
      "step": 1220
    },
    {
      "epoch": 4.9,
      "grad_norm": 2.945625263690615,
      "learning_rate": 1.2179748700879013e-08,
      "loss": 0.2051,
      "step": 1225
    },
    {
      "epoch": 4.92,
      "grad_norm": 2.3318033551646793,
      "learning_rate": 7.796179090094891e-09,
      "loss": 0.1929,
      "step": 1230
    },
    {
      "epoch": 4.9399999999999995,
      "grad_norm": 2.7773560139669944,
      "learning_rate": 4.385849505708084e-09,
      "loss": 0.1958,
      "step": 1235
    },
    {
      "epoch": 4.96,
      "grad_norm": 2.2518024677328685,
      "learning_rate": 1.9494247982282386e-09,
      "loss": 0.2032,
      "step": 1240
    },
    {
      "epoch": 4.98,
      "grad_norm": 2.0203646274765132,
      "learning_rate": 4.87379953478806e-10,
      "loss": 0.2075,
      "step": 1245
    },
    {
      "epoch": 5.0,
      "grad_norm": 2.9166287851342645,
      "learning_rate": 0.0,
      "loss": 0.1981,
      "step": 1250
    },
    {
      "epoch": 5.0,
      "step": 1250,
      "total_flos": 2442779688960.0,
      "train_loss": 0.6922085983276367,
      "train_runtime": 19109.4525,
      "train_samples_per_second": 1.047,
      "train_steps_per_second": 0.065
    }
  ],
  "logging_steps": 5,
  "max_steps": 1250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2442779688960.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}