{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.948717948717949,
  "eval_steps": 500,
  "global_step": 970,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.020512820512820513, "grad_norm": 7.845447540283203, "learning_rate": 9.989690721649485e-05, "loss": 9.5248, "step": 2},
    {"epoch": 0.041025641025641026, "grad_norm": 8.389087677001953, "learning_rate": 9.969072164948454e-05, "loss": 9.1949, "step": 4},
    {"epoch": 0.06153846153846154, "grad_norm": 5.169976711273193, "learning_rate": 9.948453608247423e-05, "loss": 8.6566, "step": 6},
    {"epoch": 0.08205128205128205, "grad_norm": 4.878049850463867, "learning_rate": 9.927835051546392e-05, "loss": 8.1281, "step": 8},
    {"epoch": 0.10256410256410256, "grad_norm": 4.062979698181152, "learning_rate": 9.907216494845362e-05, "loss": 7.7464, "step": 10},
    {"epoch": 0.12307692307692308, "grad_norm": 2.26177716255188, "learning_rate": 9.88659793814433e-05, "loss": 7.4979, "step": 12},
    {"epoch": 0.14358974358974358, "grad_norm": 1.6796530485153198, "learning_rate": 9.8659793814433e-05, "loss": 7.4101, "step": 14},
    {"epoch": 0.1641025641025641, "grad_norm": 1.3691627979278564, "learning_rate": 9.845360824742269e-05, "loss": 7.2759, "step": 16},
    {"epoch": 0.18461538461538463, "grad_norm": 1.8158212900161743, "learning_rate": 9.824742268041237e-05, "loss": 7.1752, "step": 18},
    {"epoch": 0.20512820512820512, "grad_norm": 1.489174246788025, "learning_rate": 9.804123711340207e-05, "loss": 7.2658, "step": 20},
    {"epoch": 0.22564102564102564, "grad_norm": 1.363490104675293, "learning_rate": 9.783505154639175e-05, "loss": 7.1867, "step": 22},
    {"epoch": 0.24615384615384617, "grad_norm": 1.5879547595977783, "learning_rate": 9.762886597938145e-05, "loss": 7.1374, "step": 24},
    {"epoch": 0.26666666666666666, "grad_norm": 1.046538233757019, "learning_rate": 9.742268041237114e-05, "loss": 7.106, "step": 26},
    {"epoch": 0.28717948717948716, "grad_norm": 1.169195532798767, "learning_rate": 9.721649484536083e-05, "loss": 7.1945, "step": 28},
    {"epoch": 0.3076923076923077, "grad_norm": 0.8861628174781799, "learning_rate": 9.701030927835052e-05, "loss": 7.0199, "step": 30},
    {"epoch": 0.3282051282051282, "grad_norm": 1.4861819744110107, "learning_rate": 9.680412371134021e-05, "loss": 6.8774, "step": 32},
    {"epoch": 0.3487179487179487, "grad_norm": 0.9394097924232483, "learning_rate": 9.65979381443299e-05, "loss": 6.9136, "step": 34},
    {"epoch": 0.36923076923076925, "grad_norm": 1.1648708581924438, "learning_rate": 9.639175257731959e-05, "loss": 7.121, "step": 36},
    {"epoch": 0.38974358974358975, "grad_norm": 0.8142098784446716, "learning_rate": 9.618556701030928e-05, "loss": 7.1084, "step": 38},
    {"epoch": 0.41025641025641024, "grad_norm": 1.0768697261810303, "learning_rate": 9.597938144329898e-05, "loss": 7.0565, "step": 40},
    {"epoch": 0.4307692307692308, "grad_norm": 0.6553409099578857, "learning_rate": 9.577319587628867e-05, "loss": 6.9963, "step": 42},
    {"epoch": 0.4512820512820513, "grad_norm": 0.8519483208656311, "learning_rate": 9.556701030927836e-05, "loss": 7.0575, "step": 44},
    {"epoch": 0.4717948717948718, "grad_norm": 1.1110036373138428, "learning_rate": 9.536082474226805e-05, "loss": 6.998, "step": 46},
    {"epoch": 0.49230769230769234, "grad_norm": 0.8143446445465088, "learning_rate": 9.515463917525773e-05, "loss": 6.9762, "step": 48},
    {"epoch": 0.5128205128205128, "grad_norm": 1.4277548789978027, "learning_rate": 9.494845360824743e-05, "loss": 6.9832, "step": 50},
    {"epoch": 0.5333333333333333, "grad_norm": 0.6880565881729126, "learning_rate": 9.474226804123711e-05, "loss": 7.0254, "step": 52},
    {"epoch": 0.5538461538461539, "grad_norm": 0.8316001296043396, "learning_rate": 9.45360824742268e-05, "loss": 6.9704, "step": 54},
    {"epoch": 0.5743589743589743, "grad_norm": 0.91054368019104, "learning_rate": 9.43298969072165e-05, "loss": 6.9184, "step": 56},
    {"epoch": 0.5948717948717949, "grad_norm": 0.7943957448005676, "learning_rate": 9.412371134020619e-05, "loss": 6.9518, "step": 58},
    {"epoch": 0.6153846153846154, "grad_norm": 1.0420604944229126, "learning_rate": 9.391752577319588e-05, "loss": 6.9369, "step": 60},
    {"epoch": 0.6358974358974359, "grad_norm": 0.7538536787033081, "learning_rate": 9.371134020618557e-05, "loss": 6.8748, "step": 62},
    {"epoch": 0.6564102564102564, "grad_norm": 1.2202829122543335, "learning_rate": 9.350515463917526e-05, "loss": 6.8046, "step": 64},
    {"epoch": 0.676923076923077, "grad_norm": 1.0980966091156006, "learning_rate": 9.329896907216495e-05, "loss": 6.8146, "step": 66},
    {"epoch": 0.6974358974358974, "grad_norm": 1.5506396293640137, "learning_rate": 9.309278350515465e-05, "loss": 7.0011, "step": 68},
    {"epoch": 0.717948717948718, "grad_norm": 1.1768453121185303, "learning_rate": 9.288659793814434e-05, "loss": 6.8345, "step": 70},
    {"epoch": 0.7384615384615385, "grad_norm": 1.1223098039627075, "learning_rate": 9.268041237113403e-05, "loss": 6.8371, "step": 72},
    {"epoch": 0.7589743589743589, "grad_norm": 0.7969951629638672, "learning_rate": 9.247422680412372e-05, "loss": 6.902, "step": 74},
    {"epoch": 0.7794871794871795, "grad_norm": 0.8236146569252014, "learning_rate": 9.22680412371134e-05, "loss": 6.8724, "step": 76},
    {"epoch": 0.8, "grad_norm": 1.0809398889541626, "learning_rate": 9.206185567010309e-05, "loss": 6.8455, "step": 78},
    {"epoch": 0.8205128205128205, "grad_norm": 1.0138286352157593, "learning_rate": 9.185567010309279e-05, "loss": 6.8771, "step": 80},
    {"epoch": 0.841025641025641, "grad_norm": 1.5918043851852417, "learning_rate": 9.164948453608247e-05, "loss": 6.8593, "step": 82},
    {"epoch": 0.8615384615384616, "grad_norm": 1.9432188272476196, "learning_rate": 9.144329896907217e-05, "loss": 6.8676, "step": 84},
    {"epoch": 0.882051282051282, "grad_norm": 1.5236680507659912, "learning_rate": 9.123711340206186e-05, "loss": 6.6954, "step": 86},
    {"epoch": 0.9025641025641026, "grad_norm": 1.347008228302002, "learning_rate": 9.103092783505155e-05, "loss": 6.7739, "step": 88},
    {"epoch": 0.9230769230769231, "grad_norm": 0.8976010680198669, "learning_rate": 9.082474226804124e-05, "loss": 6.8272, "step": 90},
    {"epoch": 0.9435897435897436, "grad_norm": 1.0713074207305908, "learning_rate": 9.061855670103093e-05, "loss": 6.722, "step": 92},
    {"epoch": 0.9641025641025641, "grad_norm": 1.3298990726470947, "learning_rate": 9.041237113402063e-05, "loss": 6.7779, "step": 94},
    {"epoch": 0.9846153846153847, "grad_norm": 1.3666430711746216, "learning_rate": 9.020618556701031e-05, "loss": 6.6804, "step": 96},
    {"epoch": 1.005128205128205, "grad_norm": 1.5196622610092163, "learning_rate": 9e-05, "loss": 6.7935, "step": 98},
    {"epoch": 1.0256410256410255, "grad_norm": 1.0106366872787476, "learning_rate": 8.97938144329897e-05, "loss": 6.7578, "step": 100},
    {"epoch": 1.0461538461538462, "grad_norm": 0.9565912485122681, "learning_rate": 8.958762886597939e-05, "loss": 6.81, "step": 102},
    {"epoch": 1.0666666666666667, "grad_norm": 0.7427358627319336, "learning_rate": 8.938144329896908e-05, "loss": 6.7069, "step": 104},
    {"epoch": 1.087179487179487, "grad_norm": 1.8422271013259888, "learning_rate": 8.917525773195877e-05, "loss": 6.7098, "step": 106},
    {"epoch": 1.1076923076923078, "grad_norm": 0.5381059050559998, "learning_rate": 8.896907216494845e-05, "loss": 6.7934, "step": 108},
    {"epoch": 1.1282051282051282, "grad_norm": 0.8050350546836853, "learning_rate": 8.876288659793815e-05, "loss": 6.6336, "step": 110},
    {"epoch": 1.1487179487179486, "grad_norm": 0.570678174495697, "learning_rate": 8.855670103092783e-05, "loss": 6.8265, "step": 112},
    {"epoch": 1.1692307692307693, "grad_norm": 0.5224724411964417, "learning_rate": 8.835051546391753e-05, "loss": 6.9796, "step": 114},
    {"epoch": 1.1897435897435897, "grad_norm": 0.6684609651565552, "learning_rate": 8.814432989690722e-05, "loss": 6.701, "step": 116},
    {"epoch": 1.2102564102564102, "grad_norm": 0.9216952919960022, "learning_rate": 8.793814432989691e-05, "loss": 6.819, "step": 118},
    {"epoch": 1.2307692307692308, "grad_norm": 0.925770103931427, "learning_rate": 8.77319587628866e-05, "loss": 6.6087, "step": 120},
    {"epoch": 1.2512820512820513, "grad_norm": 0.6525481343269348, "learning_rate": 8.752577319587629e-05, "loss": 6.7305, "step": 122},
    {"epoch": 1.2717948717948717, "grad_norm": 0.7587235569953918, "learning_rate": 8.731958762886599e-05, "loss": 6.6621, "step": 124},
    {"epoch": 1.2923076923076924, "grad_norm": 0.7010625600814819, "learning_rate": 8.711340206185567e-05, "loss": 6.6443, "step": 126},
    {"epoch": 1.3128205128205128, "grad_norm": 0.7843243479728699, "learning_rate": 8.690721649484537e-05, "loss": 6.7056, "step": 128},
    {"epoch": 1.3333333333333333, "grad_norm": 0.8412699699401855, "learning_rate": 8.670103092783506e-05, "loss": 6.8242, "step": 130},
    {"epoch": 1.353846153846154, "grad_norm": 0.899800181388855, "learning_rate": 8.649484536082475e-05, "loss": 6.7249, "step": 132},
    {"epoch": 1.3743589743589744, "grad_norm": 1.413313388824463, "learning_rate": 8.628865979381444e-05, "loss": 6.6534, "step": 134},
    {"epoch": 1.3948717948717948, "grad_norm": 1.53885018825531, "learning_rate": 8.608247422680413e-05, "loss": 6.6828, "step": 136},
    {"epoch": 1.4153846153846155, "grad_norm": 0.7499423027038574, "learning_rate": 8.587628865979381e-05, "loss": 6.5888, "step": 138},
    {"epoch": 1.435897435897436, "grad_norm": 0.48903796076774597, "learning_rate": 8.567010309278351e-05, "loss": 6.7217, "step": 140},
    {"epoch": 1.4564102564102563, "grad_norm": 0.5164963006973267, "learning_rate": 8.546391752577319e-05, "loss": 6.7929, "step": 142},
    {"epoch": 1.476923076923077, "grad_norm": 0.4488673210144043, "learning_rate": 8.525773195876289e-05, "loss": 6.7542, "step": 144},
    {"epoch": 1.4974358974358974, "grad_norm": 0.9296787977218628, "learning_rate": 8.505154639175259e-05, "loss": 6.595, "step": 146},
    {"epoch": 1.5179487179487179, "grad_norm": 0.48948934674263, "learning_rate": 8.484536082474227e-05, "loss": 6.6336, "step": 148},
    {"epoch": 1.5384615384615383, "grad_norm": 1.3344464302062988, "learning_rate": 8.463917525773197e-05, "loss": 6.5617, "step": 150},
    {"epoch": 1.558974358974359, "grad_norm": 0.8237192034721375, "learning_rate": 8.443298969072165e-05, "loss": 6.6697, "step": 152},
    {"epoch": 1.5794871794871796, "grad_norm": 1.013886570930481, "learning_rate": 8.422680412371135e-05, "loss": 6.5097, "step": 154},
    {"epoch": 1.6, "grad_norm": 0.5843191742897034, "learning_rate": 8.402061855670103e-05, "loss": 6.761, "step": 156},
    {"epoch": 1.6205128205128205, "grad_norm": 0.5963282585144043, "learning_rate": 8.381443298969073e-05, "loss": 6.8233, "step": 158},
    {"epoch": 1.641025641025641, "grad_norm": 0.7131335735321045, "learning_rate": 8.360824742268042e-05, "loss": 6.5581, "step": 160},
    {"epoch": 1.6615384615384614, "grad_norm": 0.5245983004570007, "learning_rate": 8.340206185567011e-05, "loss": 6.5801, "step": 162},
    {"epoch": 1.682051282051282, "grad_norm": 0.6111565232276917, "learning_rate": 8.31958762886598e-05, "loss": 6.6157, "step": 164},
    {"epoch": 1.7025641025641025, "grad_norm": 0.7455165982246399, "learning_rate": 8.298969072164949e-05, "loss": 6.6316, "step": 166},
    {"epoch": 1.7230769230769232, "grad_norm": 0.7735164761543274, "learning_rate": 8.278350515463917e-05, "loss": 6.6299, "step": 168},
    {"epoch": 1.7435897435897436, "grad_norm": 0.5676758885383606, "learning_rate": 8.257731958762887e-05, "loss": 6.7204, "step": 170},
    {"epoch": 1.764102564102564, "grad_norm": 0.8821406364440918, "learning_rate": 8.237113402061855e-05, "loss": 6.3924, "step": 172},
    {"epoch": 1.7846153846153845, "grad_norm": 0.6726123094558716, "learning_rate": 8.216494845360825e-05, "loss": 6.7632, "step": 174},
    {"epoch": 1.8051282051282052, "grad_norm": 0.5056736469268799, "learning_rate": 8.195876288659795e-05, "loss": 6.5864, "step": 176},
    {"epoch": 1.8256410256410256, "grad_norm": 0.5093751549720764, "learning_rate": 8.175257731958763e-05, "loss": 6.5112, "step": 178},
    {"epoch": 1.8461538461538463, "grad_norm": 0.6652563810348511, "learning_rate": 8.154639175257733e-05, "loss": 6.5538, "step": 180},
    {"epoch": 1.8666666666666667, "grad_norm": 0.6594399213790894, "learning_rate": 8.134020618556701e-05, "loss": 6.6136, "step": 182},
    {"epoch": 1.8871794871794871, "grad_norm": 0.6552271246910095, "learning_rate": 8.113402061855671e-05, "loss": 6.4301, "step": 184},
    {"epoch": 1.9076923076923076, "grad_norm": 1.0746954679489136, "learning_rate": 8.092783505154639e-05, "loss": 6.3992, "step": 186},
    {"epoch": 1.9282051282051282, "grad_norm": 0.5134009122848511, "learning_rate": 8.072164948453609e-05, "loss": 6.5104, "step": 188},
    {"epoch": 1.9487179487179487, "grad_norm": 0.6844182014465332, "learning_rate": 8.051546391752578e-05, "loss": 6.4957, "step": 190},
    {"epoch": 1.9692307692307693, "grad_norm": 0.5666801929473877, "learning_rate": 8.030927835051547e-05, "loss": 6.679, "step": 192},
    {"epoch": 1.9897435897435898, "grad_norm": 0.7489727139472961, "learning_rate": 8.010309278350516e-05, "loss": 6.5199, "step": 194},
    {"epoch": 2.01025641025641, "grad_norm": 0.6453472971916199, "learning_rate": 7.989690721649485e-05, "loss": 6.5507, "step": 196},
    {"epoch": 2.0307692307692307, "grad_norm": 0.5807784199714661, "learning_rate": 7.969072164948455e-05, "loss": 6.7443, "step": 198},
    {"epoch": 2.051282051282051, "grad_norm": 0.6182963848114014, "learning_rate": 7.948453608247423e-05, "loss": 6.4959, "step": 200},
    {"epoch": 2.071794871794872, "grad_norm": 0.579248309135437, "learning_rate": 7.927835051546391e-05, "loss": 6.597, "step": 202},
    {"epoch": 2.0923076923076924, "grad_norm": 0.5996450781822205, "learning_rate": 7.907216494845361e-05, "loss": 6.7087, "step": 204},
    {"epoch": 2.112820512820513, "grad_norm": 0.4910239577293396, "learning_rate": 7.88659793814433e-05, "loss": 6.666, "step": 206},
    {"epoch": 2.1333333333333333, "grad_norm": 0.6239397525787354, "learning_rate": 7.865979381443299e-05, "loss": 6.4757, "step": 208},
    {"epoch": 2.1538461538461537, "grad_norm": 0.581991970539093, "learning_rate": 7.845360824742269e-05, "loss": 6.4388, "step": 210},
    {"epoch": 2.174358974358974, "grad_norm": 0.6894115209579468, "learning_rate": 7.824742268041237e-05, "loss": 6.7599, "step": 212},
    {"epoch": 2.194871794871795, "grad_norm": 0.6936022043228149, "learning_rate": 7.804123711340207e-05, "loss": 6.5268, "step": 214},
    {"epoch": 2.2153846153846155, "grad_norm": 0.6604471802711487, "learning_rate": 7.783505154639175e-05, "loss": 6.5513, "step": 216},
    {"epoch": 2.235897435897436, "grad_norm": 0.6073073744773865, "learning_rate": 7.762886597938145e-05, "loss": 6.5175, "step": 218},
    {"epoch": 2.2564102564102564, "grad_norm": 0.5706610679626465, "learning_rate": 7.742268041237114e-05, "loss": 6.6232, "step": 220},
    {"epoch": 2.276923076923077, "grad_norm": 0.6433441042900085, "learning_rate": 7.721649484536083e-05, "loss": 6.324, "step": 222},
    {"epoch": 2.2974358974358973, "grad_norm": 0.6643648743629456, "learning_rate": 7.701030927835053e-05, "loss": 6.5876, "step": 224},
    {"epoch": 2.3179487179487177, "grad_norm": 0.6086193323135376, "learning_rate": 7.680412371134021e-05, "loss": 6.5513, "step": 226},
    {"epoch": 2.3384615384615386, "grad_norm": 0.7945834994316101, "learning_rate": 7.65979381443299e-05, "loss": 6.4032, "step": 228},
    {"epoch": 2.358974358974359, "grad_norm": 0.7115459442138672, "learning_rate": 7.639175257731959e-05, "loss": 6.7107, "step": 230},
    {"epoch": 2.3794871794871795, "grad_norm": 0.6793861985206604, "learning_rate": 7.618556701030927e-05, "loss": 6.6648, "step": 232},
    {"epoch": 2.4, "grad_norm": 0.7627358436584473, "learning_rate": 7.597938144329897e-05, "loss": 6.5648, "step": 234},
    {"epoch": 2.4205128205128204, "grad_norm": 0.6426063179969788, "learning_rate": 7.577319587628867e-05, "loss": 6.2766, "step": 236},
    {"epoch": 2.4410256410256412, "grad_norm": 0.6580905914306641, "learning_rate": 7.556701030927835e-05, "loss": 6.6025, "step": 238},
    {"epoch": 2.4615384615384617, "grad_norm": 0.7617653608322144, "learning_rate": 7.536082474226805e-05, "loss": 6.5868, "step": 240},
    {"epoch": 2.482051282051282, "grad_norm": 0.5646825432777405, "learning_rate": 7.515463917525773e-05, "loss": 6.6345, "step": 242},
    {"epoch": 2.5025641025641026, "grad_norm": 0.637170135974884, "learning_rate": 7.494845360824743e-05, "loss": 6.4776, "step": 244},
    {"epoch": 2.523076923076923, "grad_norm": 0.6873669028282166, "learning_rate": 7.474226804123711e-05, "loss": 6.5078, "step": 246},
    {"epoch": 2.5435897435897434, "grad_norm": 0.5859996676445007, "learning_rate": 7.453608247422681e-05, "loss": 6.3846, "step": 248},
    {"epoch": 2.564102564102564, "grad_norm": 0.760138750076294, "learning_rate": 7.43298969072165e-05, "loss": 6.4852, "step": 250},
    {"epoch": 2.5846153846153848, "grad_norm": 0.650406002998352, "learning_rate": 7.412371134020619e-05, "loss": 6.57, "step": 252},
    {"epoch": 2.605128205128205, "grad_norm": 0.591342031955719, "learning_rate": 7.391752577319589e-05, "loss": 6.5708, "step": 254},
    {"epoch": 2.6256410256410256, "grad_norm": 0.5927435755729675, "learning_rate": 7.371134020618557e-05, "loss": 6.6123, "step": 256},
    {"epoch": 2.646153846153846, "grad_norm": 0.7065881490707397, "learning_rate": 7.350515463917527e-05, "loss": 6.5514, "step": 258},
    {"epoch": 2.6666666666666665, "grad_norm": 0.4774336516857147, "learning_rate": 7.329896907216495e-05, "loss": 6.4391, "step": 260},
    {"epoch": 2.6871794871794874, "grad_norm": 0.6040366888046265, "learning_rate": 7.309278350515463e-05, "loss": 6.5542, "step": 262},
    {"epoch": 2.707692307692308, "grad_norm": 0.6922281980514526, "learning_rate": 7.288659793814433e-05, "loss": 6.4446, "step": 264},
    {"epoch": 2.7282051282051283, "grad_norm": 0.8469168543815613, "learning_rate": 7.268041237113403e-05, "loss": 6.4419, "step": 266},
    {"epoch": 2.7487179487179487, "grad_norm": 0.6104958057403564, "learning_rate": 7.247422680412371e-05, "loss": 6.5526, "step": 268},
    {"epoch": 2.769230769230769, "grad_norm": 0.6809393167495728, "learning_rate": 7.226804123711341e-05, "loss": 6.6546, "step": 270},
    {"epoch": 2.7897435897435896, "grad_norm": 0.7717563509941101, "learning_rate": 7.206185567010309e-05, "loss": 6.3678, "step": 272},
    {"epoch": 2.81025641025641, "grad_norm": 0.7897918820381165, "learning_rate": 7.185567010309279e-05, "loss": 6.2131, "step": 274},
    {"epoch": 2.830769230769231, "grad_norm": 0.6473929286003113, "learning_rate": 7.164948453608247e-05, "loss": 6.6045, "step": 276},
    {"epoch": 2.8512820512820514, "grad_norm": 0.7253041863441467, "learning_rate": 7.144329896907217e-05, "loss": 6.4355, "step": 278},
    {"epoch": 2.871794871794872, "grad_norm": 0.6646418571472168, "learning_rate": 7.123711340206187e-05, "loss": 6.4636, "step": 280},
    {"epoch": 2.8923076923076922, "grad_norm": 0.6046567559242249, "learning_rate": 7.103092783505155e-05, "loss": 6.3511, "step": 282},
    {"epoch": 2.9128205128205127, "grad_norm": 0.6396191120147705, "learning_rate": 7.082474226804125e-05, "loss": 6.5544, "step": 284},
    {"epoch": 2.9333333333333336, "grad_norm": 0.7889509797096252, "learning_rate": 7.061855670103093e-05, "loss": 6.5913, "step": 286},
    {"epoch": 2.953846153846154, "grad_norm": 0.7650195956230164, "learning_rate": 7.041237113402063e-05, "loss": 6.4351, "step": 288},
    {"epoch": 2.9743589743589745, "grad_norm": 0.7612701654434204, "learning_rate": 7.020618556701031e-05, "loss": 6.3414, "step": 290},
    {"epoch": 2.994871794871795, "grad_norm": 0.7275530695915222, "learning_rate": 7e-05, "loss": 6.5395, "step": 292},
    {"epoch": 3.0153846153846153, "grad_norm": 0.6664620637893677, "learning_rate": 6.979381443298969e-05, "loss": 6.1818, "step": 294},
    {"epoch": 3.0358974358974358, "grad_norm": 0.6935043334960938, "learning_rate": 6.958762886597939e-05, "loss": 6.2724, "step": 296},
    {"epoch": 3.056410256410256, "grad_norm": 0.691132664680481, "learning_rate": 6.938144329896907e-05, "loss": 6.3618, "step": 298},
    {"epoch": 3.076923076923077, "grad_norm": 0.6852958798408508, "learning_rate": 6.917525773195877e-05, "loss": 6.4248, "step": 300},
    {"epoch": 3.0974358974358975, "grad_norm": 0.7287474274635315, "learning_rate": 6.896907216494845e-05, "loss": 6.5134, "step": 302},
    {"epoch": 3.117948717948718, "grad_norm": 0.6913236379623413, "learning_rate": 6.876288659793815e-05, "loss": 6.5583, "step": 304},
    {"epoch": 3.1384615384615384, "grad_norm": 0.7557226419448853, "learning_rate": 6.855670103092783e-05, "loss": 6.3671, "step": 306},
    {"epoch": 3.158974358974359, "grad_norm": 0.673149049282074, "learning_rate": 6.835051546391753e-05, "loss": 6.463, "step": 308},
    {"epoch": 3.1794871794871793, "grad_norm": 0.7111195921897888, "learning_rate": 6.814432989690723e-05, "loss": 6.4296, "step": 310},
    {"epoch": 3.2, "grad_norm": 0.7462407946586609, "learning_rate": 6.793814432989691e-05, "loss": 6.3978, "step": 312},
    {"epoch": 3.2205128205128206, "grad_norm": 0.633978009223938, "learning_rate": 6.773195876288661e-05, "loss": 6.5069, "step": 314},
    {"epoch": 3.241025641025641, "grad_norm": 0.5375587940216064, "learning_rate": 6.752577319587629e-05, "loss": 6.5878, "step": 316},
    {"epoch": 3.2615384615384615, "grad_norm": 0.6872283816337585, "learning_rate": 6.731958762886599e-05, "loss": 6.5541, "step": 318},
    {"epoch": 3.282051282051282, "grad_norm": 0.7773441076278687, "learning_rate": 6.711340206185567e-05, "loss": 6.3327, "step": 320},
    {"epoch": 3.3025641025641024, "grad_norm": 0.9842550754547119, "learning_rate": 6.690721649484535e-05, "loss": 6.4966, "step": 322},
    {"epoch": 3.3230769230769233, "grad_norm": 0.5430623292922974, "learning_rate": 6.670103092783505e-05, "loss": 6.5381, "step": 324},
    {"epoch": 3.3435897435897437, "grad_norm": 0.6485368609428406, "learning_rate": 6.649484536082475e-05, "loss": 6.4691, "step": 326},
    {"epoch": 3.364102564102564, "grad_norm": 0.8230199217796326, "learning_rate": 6.628865979381443e-05, "loss": 6.3189, "step": 328},
    {"epoch": 3.3846153846153846, "grad_norm": 0.6916872262954712, "learning_rate": 6.608247422680413e-05, "loss": 6.3726, "step": 330},
    {"epoch": 3.405128205128205, "grad_norm": 0.6563546657562256, "learning_rate": 6.587628865979381e-05, "loss": 6.532, "step": 332},
    {"epoch": 3.4256410256410255, "grad_norm": 0.8116714954376221, "learning_rate": 6.567010309278351e-05, "loss": 6.3005, "step": 334},
    {"epoch": 3.4461538461538463, "grad_norm": 0.8624938726425171, "learning_rate": 6.546391752577319e-05, "loss": 6.2658, "step": 336},
    {"epoch": 3.466666666666667, "grad_norm": 0.8297551274299622, "learning_rate": 6.525773195876289e-05, "loss": 6.3995, "step": 338},
    {"epoch": 3.4871794871794872, "grad_norm": 0.6855469942092896, "learning_rate": 6.505154639175259e-05, "loss": 6.3061, "step": 340},
    {"epoch": 3.5076923076923077, "grad_norm": 0.7813113331794739, "learning_rate": 6.484536082474227e-05, "loss": 6.3236, "step": 342},
    {"epoch": 3.528205128205128, "grad_norm": 0.6506015062332153, "learning_rate": 6.463917525773197e-05, "loss": 6.4452, "step": 344},
    {"epoch": 3.5487179487179485, "grad_norm": 0.7242425084114075, "learning_rate": 6.443298969072165e-05, "loss": 6.4236, "step": 346},
    {"epoch": 3.569230769230769, "grad_norm": 0.7753481864929199, "learning_rate": 6.422680412371135e-05, "loss": 6.503, "step": 348},
    {"epoch": 3.58974358974359, "grad_norm": 0.8971739411354065, "learning_rate": 6.402061855670103e-05, "loss": 6.2226, "step": 350},
    {"epoch": 3.6102564102564103, "grad_norm": 0.719221293926239, "learning_rate": 6.381443298969071e-05, "loss": 6.4305, "step": 352},
    {"epoch": 3.6307692307692307, "grad_norm": 0.7092213034629822, "learning_rate": 6.360824742268041e-05, "loss": 6.5107, "step": 354},
    {"epoch": 3.651282051282051, "grad_norm": 0.69648277759552, "learning_rate": 6.340206185567011e-05, "loss": 6.1619, "step": 356},
    {"epoch": 3.6717948717948716, "grad_norm": 0.7983478903770447, "learning_rate": 6.319587628865979e-05, "loss": 6.2945, "step": 358},
    {"epoch": 3.6923076923076925, "grad_norm": 0.6972605586051941, "learning_rate": 6.298969072164949e-05, "loss": 6.3397, "step": 360},
    {"epoch": 3.712820512820513, "grad_norm": 0.7316955924034119, "learning_rate": 6.278350515463917e-05, "loss": 6.5642, "step": 362},
    {"epoch": 3.7333333333333334, "grad_norm": 0.819416344165802, "learning_rate": 6.257731958762887e-05, "loss": 6.5361, "step": 364},
    {"epoch": 3.753846153846154, "grad_norm": 0.7884580492973328, "learning_rate": 6.237113402061855e-05, "loss": 6.1174, "step": 366},
    {"epoch": 3.7743589743589743, "grad_norm": 0.7763500809669495, "learning_rate": 6.216494845360825e-05, "loss": 6.2631, "step": 368},
    {"epoch": 3.7948717948717947, "grad_norm": 0.8002706170082092, "learning_rate": 6.195876288659795e-05, "loss": 6.3999, "step": 370},
    {"epoch": 3.815384615384615, "grad_norm": 0.6830271482467651, "learning_rate": 6.175257731958763e-05, "loss": 6.4109, "step": 372},
    {"epoch": 3.835897435897436, "grad_norm": 0.8474668860435486, "learning_rate": 6.154639175257733e-05, "loss": 6.2914, "step": 374},
    {"epoch": 3.8564102564102565, "grad_norm": 0.7036705017089844, "learning_rate": 6.134020618556701e-05, "loss": 6.5863, "step": 376},
    {"epoch": 3.876923076923077, "grad_norm": 0.6333104968070984, "learning_rate": 6.113402061855671e-05, "loss": 6.5162, "step": 378},
    {"epoch": 3.8974358974358974, "grad_norm": 0.5705188512802124, "learning_rate": 6.092783505154639e-05, "loss": 6.3006, "step": 380},
    {"epoch": 3.917948717948718, "grad_norm": 0.634800136089325, "learning_rate": 6.072164948453608e-05, "loss": 6.4401, "step": 382},
    {"epoch": 3.9384615384615387, "grad_norm": 0.6740937232971191, "learning_rate": 6.051546391752577e-05, "loss": 6.698, "step": 384},
    {"epoch": 3.958974358974359, "grad_norm": 0.9393010139465332, "learning_rate": 6.030927835051546e-05, "loss": 6.1999, "step": 386},
    {"epoch": 3.9794871794871796, "grad_norm": 0.6255496740341187, "learning_rate": 6.010309278350515e-05, "loss": 6.3655, "step": 388},
    {"epoch": 4.0, "grad_norm": 0.6616120934486389, "learning_rate": 5.989690721649484e-05, "loss": 6.4234, "step": 390},
    {"epoch": 4.02051282051282, "grad_norm": 0.6440649032592773, "learning_rate": 5.969072164948454e-05, "loss": 6.3687, "step": 392},
    {"epoch": 4.041025641025641, "grad_norm": 0.6960980296134949, "learning_rate": 5.948453608247423e-05, "loss": 6.453, "step": 394},
    {"epoch": 4.061538461538461, "grad_norm": 0.8931025266647339, "learning_rate": 5.927835051546392e-05, "loss": 6.4502, "step": 396},
    {"epoch": 4.082051282051282, "grad_norm": 0.6508329510688782, "learning_rate": 5.907216494845361e-05, "loss": 6.4387, "step": 398},
    {"epoch": 4.102564102564102, "grad_norm": 0.7409791946411133, "learning_rate": 5.88659793814433e-05, "loss": 6.3901, "step": 400},
    {"epoch": 4.123076923076923, "grad_norm": 0.6797041893005371, "learning_rate": 5.8659793814433e-05, "loss": 6.3299, "step": 402},
    {"epoch": 4.143589743589744, "grad_norm": 0.7487124800682068, "learning_rate": 5.845360824742269e-05, "loss": 6.3846, "step": 404},
    {"epoch": 4.164102564102564, "grad_norm": 0.6995207071304321, "learning_rate": 5.824742268041238e-05, "loss": 6.4029, "step": 406},
    {"epoch": 4.184615384615385, "grad_norm": 0.920820951461792, "learning_rate": 5.804123711340207e-05, "loss": 6.398, "step": 408},
    {"epoch": 4.205128205128205, "grad_norm": 0.7857652306556702, "learning_rate": 5.783505154639176e-05, "loss": 6.2116, "step": 410},
    {"epoch": 4.225641025641026, "grad_norm": 0.655503511428833, "learning_rate": 5.762886597938144e-05, "loss": 6.4523, "step": 412},
    {"epoch": 4.246153846153846, "grad_norm": 0.7093043327331543, "learning_rate": 5.742268041237113e-05, "loss": 6.2784, "step": 414},
    {"epoch": 4.266666666666667, "grad_norm": 0.6597480177879333, "learning_rate": 5.721649484536082e-05, "loss": 6.4637, "step": 416},
    {"epoch": 4.287179487179487, "grad_norm": 0.8404126763343811, "learning_rate": 5.7010309278350514e-05, "loss": 6.226, "step": 418},
    {"epoch": 4.3076923076923075, "grad_norm": 0.7892007827758789, "learning_rate": 5.6804123711340204e-05, "loss": 6.3117, "step": 420},
    {"epoch": 4.328205128205128, "grad_norm": 0.7826732397079468, "learning_rate": 5.65979381443299e-05, "loss": 6.2826, "step": 422},
    {"epoch": 4.348717948717948, "grad_norm": 0.7233083248138428, "learning_rate": 5.639175257731959e-05, "loss": 6.3743, "step": 424},
    {"epoch": 4.36923076923077, "grad_norm": 0.6873509287834167, "learning_rate": 5.618556701030928e-05, "loss": 6.3368, "step": 426},
    {"epoch": 4.38974358974359, "grad_norm": 0.8165833353996277, "learning_rate": 5.597938144329897e-05, "loss": 6.4183, "step": 428},
    {"epoch": 4.410256410256411, "grad_norm": 0.7139970064163208, "learning_rate": 5.577319587628866e-05, "loss": 6.3427, "step": 430},
    {"epoch": 4.430769230769231, "grad_norm": 0.61057448387146, "learning_rate": 5.556701030927836e-05, "loss": 6.3747, "step": 432},
    {"epoch": 4.4512820512820515, "grad_norm": 0.8155067563056946, "learning_rate": 5.536082474226805e-05, "loss": 6.1935, "step": 434},
    {"epoch": 4.471794871794872, "grad_norm": 0.7220977544784546, "learning_rate": 5.515463917525774e-05, "loss": 6.4346, "step": 436},
    {"epoch": 4.492307692307692, "grad_norm": 0.6980003714561462, "learning_rate": 5.494845360824743e-05, "loss": 6.3355, "step": 438},
    {"epoch": 4.512820512820513, "grad_norm": 0.7350332736968994, "learning_rate": 5.474226804123712e-05, "loss": 6.2946, "step": 440},
    {"epoch": 4.533333333333333, "grad_norm": 0.6564266085624695, "learning_rate": 5.45360824742268e-05, "loss": 6.3044, "step": 442},
    {"epoch": 4.553846153846154, "grad_norm": 0.8935516476631165, "learning_rate": 5.4329896907216494e-05, "loss": 6.0784, "step": 444},
    {"epoch": 4.574358974358974, "grad_norm": 0.6622797846794128, "learning_rate": 5.4123711340206184e-05, "loss": 6.3122, "step": 446},
    {"epoch": 4.5948717948717945, "grad_norm": 0.6584466695785522, "learning_rate": 5.3917525773195874e-05, "loss": 6.4586, "step": 448},
    {"epoch": 4.615384615384615, "grad_norm": 0.7214877605438232, "learning_rate": 5.3711340206185564e-05, "loss": 6.4671, "step": 450},
    {"epoch": 4.635897435897435, "grad_norm": 0.6560644507408142, "learning_rate": 5.350515463917526e-05, "loss": 6.2472, "step": 452},
    {"epoch": 4.656410256410257, "grad_norm": 0.7048325538635254, "learning_rate": 5.329896907216495e-05, "loss": 6.5175, "step": 454},
    {"epoch": 4.676923076923077, "grad_norm": 0.6315538883209229, "learning_rate": 5.309278350515464e-05, "loss": 6.0901, "step": 456},
    {"epoch": 4.697435897435898, "grad_norm": 0.6339179277420044, "learning_rate": 5.288659793814433e-05, "loss": 6.3935, "step": 458},
    {"epoch": 4.717948717948718, "grad_norm": 0.8235088586807251, "learning_rate": 5.268041237113402e-05, "loss": 5.9898, "step": 460},
    {"epoch": 4.7384615384615385, "grad_norm": 0.7074024081230164, "learning_rate": 5.247422680412372e-05, "loss": 6.1804, "step": 462},
    {"epoch": 4.758974358974359, "grad_norm": 0.8095348477363586, "learning_rate": 5.226804123711341e-05, "loss": 6.2209, "step": 464},
    {"epoch": 4.779487179487179, "grad_norm": 0.641472578048706, "learning_rate": 5.20618556701031e-05, "loss": 6.3102, "step": 466},
    {"epoch": 4.8, "grad_norm": 0.7063437700271606, "learning_rate": 5.185567010309279e-05, "loss": 6.2348, "step": 468},
    {"epoch": 4.82051282051282, "grad_norm": 0.7544019818305969, "learning_rate": 5.164948453608248e-05, "loss": 6.3763, "step": 470},
    {"epoch": 4.841025641025641, "grad_norm": 0.827674925327301, "learning_rate": 5.1443298969072164e-05, "loss": 6.0069, "step": 472},
    {"epoch": 4.861538461538462, "grad_norm": 0.8011050224304199, "learning_rate": 5.1237113402061854e-05, "loss": 6.257, "step": 474},
    {"epoch": 4.8820512820512825, "grad_norm": 0.6386663913726807, "learning_rate": 5.1030927835051544e-05, "loss": 6.3936, "step": 476},
    {"epoch": 4.902564102564103, "grad_norm": 0.7228955030441284, "learning_rate": 5.0824742268041235e-05, "loss": 6.4586, "step": 478},
    {"epoch": 4.923076923076923, "grad_norm": 0.6252913475036621, "learning_rate": 5.0618556701030925e-05, "loss": 6.3536, "step": 480},
    {"epoch": 4.943589743589744, "grad_norm": 0.7947326898574829, "learning_rate": 5.041237113402062e-05, "loss": 6.3856, "step": 482},
    {"epoch": 4.964102564102564, "grad_norm": 0.6296289563179016, "learning_rate": 5.020618556701031e-05, "loss": 6.4445, "step": 484},
    {"epoch": 4.984615384615385, "grad_norm": 0.6455388069152832, "learning_rate": 5e-05, "loss": 6.4503, "step": 486},
    {"epoch": 5.005128205128205, "grad_norm": 0.7692280411720276, "learning_rate": 4.979381443298969e-05, "loss": 6.4011, "step": 488},
    {"epoch": 5.0256410256410255, "grad_norm": 0.6310716867446899, "learning_rate": 4.958762886597938e-05, "loss": 6.3749, "step": 490},
    {"epoch": 5.046153846153846, "grad_norm": 0.8823466897010803, "learning_rate": 4.938144329896908e-05, "loss": 6.2891, "step": 492},
    {"epoch": 5.066666666666666, "grad_norm": 0.7742350101470947, "learning_rate": 4.9175257731958763e-05, "loss": 6.1306, "step": 494},
    {"epoch": 5.087179487179487, "grad_norm": 0.6190592050552368, "learning_rate": 4.8969072164948454e-05, "loss": 6.3312, "step": 496},
    {"epoch": 5.107692307692307, "grad_norm": 0.7244901657104492, "learning_rate": 4.8762886597938144e-05, "loss": 6.3803, "step": 498},
    {"epoch": 5.128205128205128, "grad_norm": 0.7023468017578125, "learning_rate": 4.8556701030927834e-05, "loss": 6.2492, "step": 500},
    {"epoch": 5.148717948717949, "grad_norm": 0.7719900608062744, "learning_rate": 4.835051546391753e-05, "loss": 6.1033, "step": 502},
    {"epoch": 5.1692307692307695, "grad_norm": 0.8199257254600525, "learning_rate": 4.814432989690722e-05, "loss": 6.1981, "step": 504},
    {"epoch": 5.18974358974359, "grad_norm": 0.8238953351974487, "learning_rate": 4.793814432989691e-05, "loss": 6.2671, "step": 506},
    {"epoch": 5.21025641025641, "grad_norm": 0.5926157832145691, "learning_rate": 4.77319587628866e-05, "loss": 6.3837, "step": 508},
    {"epoch": 5.230769230769231, "grad_norm": 0.6979117393493652, "learning_rate": 4.7525773195876285e-05, "loss": 6.2836, "step": 510},
    {"epoch": 5.251282051282051, "grad_norm": 0.6661803126335144, "learning_rate": 4.731958762886598e-05, "loss": 6.1655, "step": 512},
    {"epoch": 5.271794871794872, "grad_norm": 0.6727478504180908, "learning_rate": 4.711340206185567e-05, "loss": 6.0956, "step": 514},
    {"epoch": 5.292307692307692, "grad_norm": 0.665337085723877, "learning_rate": 4.690721649484536e-05, "loss": 6.3724, "step": 516},
    {"epoch": 5.312820512820513, "grad_norm": 0.702816903591156, "learning_rate": 4.670103092783505e-05, "loss": 6.2241, "step": 518},
    {"epoch": 5.333333333333333, "grad_norm": 0.6713479161262512, "learning_rate": 4.6494845360824743e-05, "loss": 6.3077, "step": 520},
    {"epoch": 5.3538461538461535, "grad_norm": 0.6276135444641113, "learning_rate": 4.628865979381444e-05, "loss": 6.4411, "step": 522},
    {"epoch": 5.374358974358975, "grad_norm": 0.7353730201721191, "learning_rate": 4.6082474226804124e-05, "loss": 6.1735, "step": 524},
    {"epoch": 5.394871794871795, "grad_norm": 0.7393136620521545, "learning_rate": 4.5876288659793814e-05, "loss": 6.413, "step": 526},
    {"epoch": 5.415384615384616, "grad_norm": 0.5835318565368652, "learning_rate": 4.5670103092783504e-05, "loss": 6.4997, "step": 528},
    {"epoch": 5.435897435897436, "grad_norm": 0.6543262600898743, "learning_rate": 4.5463917525773195e-05, "loss": 6.3292, "step": 530},
    {"epoch": 5.456410256410257, "grad_norm": 0.7140960693359375, "learning_rate": 4.525773195876289e-05, "loss": 6.3267, "step": 532},
    {"epoch": 5.476923076923077, "grad_norm": 0.6978656649589539, "learning_rate": 4.505154639175258e-05, "loss": 6.4343, "step": 534},
    {"epoch": 5.4974358974358974, "grad_norm": 0.7488942742347717, "learning_rate": 4.484536082474227e-05, "loss": 6.3557, "step": 536},
    {"epoch": 5.517948717948718, "grad_norm": 0.753161609172821, "learning_rate": 4.463917525773196e-05, "loss": 6.2488, "step": 538},
    {"epoch": 5.538461538461538, "grad_norm": 0.7196393609046936, "learning_rate": 4.4432989690721646e-05, "loss": 6.2132, "step": 540},
    {"epoch": 5.558974358974359, "grad_norm": 0.681937038898468, "learning_rate": 4.422680412371134e-05, "loss": 6.1831, "step": 542},
    {"epoch": 5.579487179487179, "grad_norm": 0.6542847752571106, "learning_rate": 4.402061855670103e-05, "loss": 6.3156, "step": 544},
    {"epoch": 5.6, "grad_norm": 0.7364455461502075, "learning_rate": 4.3814432989690723e-05, "loss": 6.3007, "step": 546},
    {"epoch": 5.62051282051282, "grad_norm": 0.7742012739181519, "learning_rate": 4.3608247422680414e-05, "loss": 5.9234, "step": 548},
    {"epoch": 5.641025641025641, "grad_norm": 0.7302541136741638, "learning_rate": 4.3402061855670104e-05, "loss": 6.3115, "step": 550},
    {"epoch": 5.661538461538462, "grad_norm": 0.7138387560844421, "learning_rate": 4.31958762886598e-05, "loss": 6.1719, "step": 552},
    {"epoch": 5.682051282051282, "grad_norm": 0.6778889298439026, "learning_rate": 4.298969072164949e-05, "loss": 6.1886, "step": 554},
    {"epoch": 5.702564102564103, "grad_norm": 0.7845616936683655, "learning_rate": 4.2783505154639175e-05, "loss": 6.1925, "step": 556},
    {"epoch": 5.723076923076923, "grad_norm": 0.780630886554718, "learning_rate": 4.2577319587628865e-05, "loss": 6.3325, "step": 558},
    {"epoch": 5.743589743589744, "grad_norm": 0.630280613899231, "learning_rate": 4.2371134020618555e-05, "loss": 6.3334, "step": 560},
    {"epoch": 5.764102564102564, "grad_norm": 0.7802500128746033, "learning_rate": 4.216494845360825e-05, "loss": 6.2872, "step": 562},
    {"epoch": 5.7846153846153845, "grad_norm": 0.6819648146629333, "learning_rate": 4.195876288659794e-05, "loss": 6.3133, "step": 564},
    {"epoch": 5.805128205128205, "grad_norm": 0.7848822474479675, "learning_rate": 4.175257731958763e-05, "loss": 6.4798, "step": 566},
    {"epoch": 5.825641025641025, "grad_norm": 0.7532023787498474, "learning_rate": 4.154639175257732e-05, "loss": 6.1145, "step": 568},
    {"epoch": 5.846153846153846, "grad_norm": 0.679144561290741, "learning_rate": 4.1340206185567006e-05, "loss": 6.2509, "step": 570},
    {"epoch": 5.866666666666667, "grad_norm": 0.7792911529541016, "learning_rate": 4.1134020618556704e-05, "loss": 6.1544, "step": 572},
    {"epoch": 5.887179487179488, "grad_norm": 0.7862560153007507, "learning_rate": 4.0927835051546394e-05, "loss": 6.2661, "step": 574},
    {"epoch": 5.907692307692308, "grad_norm": 0.6902645826339722, "learning_rate": 4.0721649484536084e-05, "loss": 6.3423, "step": 576},
    {"epoch": 5.9282051282051285, "grad_norm": 0.6430610418319702, "learning_rate": 4.0515463917525774e-05, "loss": 6.3058, "step": 578},
    {"epoch": 5.948717948717949, "grad_norm": 0.7760281562805176, "learning_rate": 4.0309278350515465e-05, "loss": 6.3007, "step": 580},
    {"epoch": 5.969230769230769, "grad_norm": 0.7323822975158691, "learning_rate": 4.010309278350516e-05, "loss": 6.3434, "step": 582},
    {"epoch": 5.98974358974359, "grad_norm": 0.7060137391090393, "learning_rate": 3.989690721649485e-05, "loss": 6.2571, "step": 584},
    {"epoch": 6.01025641025641, "grad_norm": 0.7796257734298706, "learning_rate": 3.9690721649484535e-05, "loss": 6.1228, "step": 586},
    {"epoch": 6.030769230769231, "grad_norm": 0.8794940710067749, "learning_rate": 3.9484536082474226e-05, "loss": 6.3449, "step": 588},
    {"epoch": 6.051282051282051, "grad_norm": 0.6452521681785583, "learning_rate": 3.9278350515463916e-05, "loss": 6.0527, "step": 590},
    {"epoch": 6.0717948717948715, "grad_norm": 0.7071980834007263, "learning_rate": 3.907216494845361e-05, "loss": 6.1828, "step": 592},
    {"epoch": 6.092307692307692, "grad_norm": 0.6614767909049988, "learning_rate": 3.88659793814433e-05, "loss": 6.2702, "step": 594},
    {"epoch": 6.112820512820512, "grad_norm": 0.6533995866775513, "learning_rate": 3.865979381443299e-05, "loss": 6.2421, "step": 596},
    {"epoch": 6.133333333333334, "grad_norm": 0.7907997965812683, "learning_rate": 3.8453608247422684e-05, "loss": 6.4282, "step": 598},
    {"epoch": 6.153846153846154, "grad_norm": 0.709160327911377, "learning_rate": 3.824742268041237e-05, "loss": 6.0814, "step": 600},
    {"epoch": 6.174358974358975, "grad_norm": 0.8180028796195984, "learning_rate": 3.8041237113402064e-05, "loss": 6.3014, "step": 602},
    {"epoch": 6.194871794871795, "grad_norm": 0.6765635013580322, "learning_rate": 3.7835051546391754e-05, "loss": 6.3844, "step": 604},
    {"epoch": 6.2153846153846155, "grad_norm": 0.6009554862976074, "learning_rate": 3.7628865979381445e-05, "loss": 6.1951, "step": 606},
    {"epoch": 6.235897435897436, "grad_norm": 0.708351731300354, "learning_rate": 3.7422680412371135e-05, "loss": 6.4139, "step": 608},
    {"epoch": 6.256410256410256, "grad_norm": 0.7396306395530701, "learning_rate": 3.7216494845360825e-05, "loss": 6.194, "step": 610},
    {"epoch": 6.276923076923077, "grad_norm": 0.772692859172821, "learning_rate": 3.701030927835052e-05, "loss": 6.2968, "step": 612},
    {"epoch": 6.297435897435897, "grad_norm": 0.8002333641052246, "learning_rate": 3.680412371134021e-05, "loss": 6.2677, "step": 614},
    {"epoch": 6.317948717948718, "grad_norm": 1.078007698059082, "learning_rate": 3.6597938144329896e-05, "loss": 6.2674, "step": 616},
    {"epoch": 6.338461538461538, "grad_norm": 0.6645541787147522, "learning_rate": 3.6391752577319586e-05, "loss": 6.1999, "step": 618},
    {"epoch": 6.358974358974359, "grad_norm": 0.6708665490150452, "learning_rate": 3.6185567010309276e-05, "loss": 6.4461, "step": 620},
    {"epoch": 6.37948717948718, "grad_norm": 0.6594524383544922, "learning_rate": 3.597938144329897e-05, "loss": 6.2954, "step": 622},
    {"epoch": 6.4, "grad_norm": 0.9436811804771423, "learning_rate": 3.5773195876288664e-05, "loss": 6.1473, "step": 624},
    {"epoch": 6.420512820512821, "grad_norm": 0.7767890691757202, "learning_rate": 3.5567010309278354e-05, "loss": 6.2695, "step": 626},
    {"epoch": 6.441025641025641, "grad_norm": 0.8223767876625061, "learning_rate": 3.5360824742268044e-05, "loss": 6.2998, "step": 628},
    {"epoch": 6.461538461538462, "grad_norm": 0.753822922706604, "learning_rate": 3.515463917525773e-05, "loss": 6.1723, "step": 630},
    {"epoch": 6.482051282051282, "grad_norm": 0.7371943593025208, "learning_rate": 3.4948453608247425e-05, "loss": 6.2228, "step": 632},
    {"epoch": 6.5025641025641026, "grad_norm": 0.6721214056015015, "learning_rate": 3.4742268041237115e-05, "loss": 6.06, "step": 634},
    {"epoch": 6.523076923076923, "grad_norm": 0.7266871333122253, "learning_rate": 3.4536082474226805e-05, "loss": 6.3087, "step": 636},
    {"epoch": 6.543589743589743, "grad_norm": 0.7427794933319092, "learning_rate": 3.4329896907216495e-05, "loss": 6.2757, "step": 638},
    {"epoch": 6.564102564102564, "grad_norm": 0.7372191548347473, "learning_rate": 3.4123711340206186e-05, "loss": 6.1809, "step": 640},
    {"epoch": 6.584615384615384, "grad_norm": 0.773668646812439, "learning_rate": 3.391752577319588e-05, "loss": 6.0375, "step": 642},
    {"epoch": 6.605128205128205, "grad_norm": 0.8196036219596863, "learning_rate": 3.371134020618557e-05, "loss": 6.3719, "step": 644},
    {"epoch": 6.625641025641025, "grad_norm": 0.6300514936447144, "learning_rate": 3.3505154639175256e-05, "loss": 6.338, "step": 646},
    {"epoch": 6.6461538461538465, "grad_norm": 0.5757874846458435, "learning_rate": 3.3298969072164947e-05, "loss": 6.4443, "step": 648},
    {"epoch": 6.666666666666667, "grad_norm": 0.8658593893051147, "learning_rate": 3.309278350515464e-05, "loss": 6.1216, "step": 650},
    {"epoch": 6.687179487179487, "grad_norm": 0.7729750275611877, "learning_rate": 3.2886597938144334e-05, "loss": 5.9493, "step": 652},
    {"epoch": 6.707692307692308, "grad_norm": 0.7950991988182068, "learning_rate": 3.2680412371134024e-05, "loss": 6.1614, "step": 654},
    {"epoch": 6.728205128205128, "grad_norm": 0.7309786081314087, "learning_rate": 3.2474226804123714e-05, "loss": 6.2484, "step": 656},
    {"epoch": 6.748717948717949, "grad_norm": 0.7773774266242981, "learning_rate": 3.2268041237113405e-05, "loss": 6.1015, "step": 658},
    {"epoch": 6.769230769230769, "grad_norm": 0.6493405103683472, "learning_rate": 3.2061855670103095e-05, "loss": 6.3609, "step": 660},
    {"epoch": 6.78974358974359, "grad_norm": 0.7441650032997131, "learning_rate": 3.1855670103092785e-05, "loss": 6.2702, "step": 662},
    {"epoch": 6.81025641025641, "grad_norm": 0.6217365264892578, "learning_rate": 3.1649484536082475e-05, "loss": 6.2799, "step": 664},
    {"epoch": 6.8307692307692305, "grad_norm": 0.84025639295578, "learning_rate": 3.1443298969072166e-05, "loss": 6.132, "step": 666},
    {"epoch": 6.851282051282051, "grad_norm": 0.7524210214614868, "learning_rate": 3.1237113402061856e-05, "loss": 6.1726, "step": 668},
    {"epoch": 6.871794871794872, "grad_norm": 0.6827425956726074, "learning_rate": 3.1030927835051546e-05, "loss": 6.2313, "step": 670},
    {"epoch": 6.892307692307693, "grad_norm": 0.6644515991210938, "learning_rate": 3.082474226804124e-05, "loss": 6.2285, "step": 672},
    {"epoch": 6.912820512820513, "grad_norm": 0.7661225199699402, "learning_rate": 3.0618556701030933e-05, "loss": 6.3277, "step": 674},
    {"epoch": 6.933333333333334, "grad_norm": 0.645756185054779, "learning_rate": 3.0412371134020617e-05, "loss": 6.2675, "step": 676},
    {"epoch": 6.953846153846154, "grad_norm": 0.8538247346878052, "learning_rate": 3.020618556701031e-05, "loss": 6.0726, "step": 678},
    {"epoch": 6.9743589743589745, "grad_norm": 0.6849417686462402, "learning_rate": 3e-05, "loss": 6.2466, "step": 680},
    {"epoch": 6.994871794871795, "grad_norm": 0.7335018515586853, "learning_rate": 2.979381443298969e-05, "loss": 6.1795, "step": 682},
    {"epoch": 7.015384615384615, "grad_norm": 0.7281456589698792, "learning_rate": 2.9587628865979385e-05, "loss": 6.2103, "step": 684},
    {"epoch": 7.035897435897436, "grad_norm": 0.6370302438735962, "learning_rate": 2.9381443298969075e-05, "loss": 6.2624, "step": 686},
    {"epoch": 7.056410256410256, "grad_norm": 0.7385585904121399, "learning_rate": 2.9175257731958765e-05, "loss": 6.1058, "step": 688},
    {"epoch": 7.076923076923077, "grad_norm": 0.7029441595077515, "learning_rate": 2.896907216494846e-05, "loss": 5.897, "step": 690},
    {"epoch": 7.097435897435897, "grad_norm": 0.7929611802101135, "learning_rate": 2.8762886597938142e-05, "loss": 6.0785, "step": 692},
    {"epoch": 7.1179487179487175, "grad_norm": 0.7209746837615967, "learning_rate": 2.8556701030927836e-05, "loss": 6.3427, "step": 694},
    {"epoch": 7.138461538461539, "grad_norm": 0.7581877112388611, "learning_rate": 2.8350515463917526e-05, "loss": 6.1258, "step": 696},
    {"epoch": 7.158974358974359, "grad_norm": 0.7337011694908142, "learning_rate": 2.8144329896907216e-05, "loss": 6.2402, "step": 698},
    {"epoch": 7.17948717948718, "grad_norm": 0.8132451772689819, "learning_rate": 2.793814432989691e-05, "loss": 6.2399, "step": 700},
    {"epoch": 7.2, "grad_norm": 0.6459912061691284, "learning_rate": 2.77319587628866e-05, "loss": 6.3475, "step": 702},
    {"epoch": 7.220512820512821, "grad_norm": 0.7494829893112183, "learning_rate": 2.752577319587629e-05, "loss": 6.1483, "step": 704},
    {"epoch": 7.241025641025641, "grad_norm": 0.7804868221282959, "learning_rate": 2.7319587628865977e-05, "loss": 5.9649, "step": 706},
    {"epoch": 7.2615384615384615, "grad_norm": 0.6681525111198425, "learning_rate": 2.711340206185567e-05, "loss": 6.1267, "step": 708},
    {"epoch": 7.282051282051282, "grad_norm": 0.8127874732017517, "learning_rate": 2.690721649484536e-05, "loss": 6.1324, "step": 710},
    {"epoch": 7.302564102564102, "grad_norm": 0.690651535987854, "learning_rate": 2.670103092783505e-05, "loss": 6.3486, "step": 712},
    {"epoch": 7.323076923076923, "grad_norm": 0.7900473475456238, "learning_rate": 2.6494845360824745e-05, "loss": 6.2113, "step": 714},
    {"epoch": 7.343589743589743, "grad_norm": 0.9337382316589355, "learning_rate": 2.6288659793814435e-05, "loss": 6.2222, "step": 716},
    {"epoch": 7.364102564102564, "grad_norm": 0.6768183708190918, "learning_rate": 2.6082474226804126e-05, "loss": 6.0611, "step": 718},
    {"epoch": 7.384615384615385, "grad_norm": 0.8045716881752014, "learning_rate": 2.587628865979382e-05, "loss": 6.1869, "step": 720},
    {"epoch": 7.4051282051282055, "grad_norm": 0.7803860902786255, "learning_rate": 2.5670103092783503e-05, "loss": 6.2456, "step": 722},
    {"epoch": 7.425641025641026, "grad_norm": 0.6583536863327026, "learning_rate": 2.5463917525773196e-05, "loss": 6.3823, "step": 724},
    {"epoch": 7.446153846153846, "grad_norm": 0.6829738616943359, "learning_rate": 2.5257731958762887e-05, "loss": 5.9673, "step": 726},
    {"epoch": 7.466666666666667, "grad_norm": 0.6171258091926575, "learning_rate": 2.5051546391752577e-05, "loss": 6.4778, "step": 728},
    {"epoch": 7.487179487179487, "grad_norm": 0.9100000858306885, "learning_rate": 2.484536082474227e-05, "loss": 6.1923, "step": 730},
    {"epoch": 7.507692307692308, "grad_norm": 0.6728296279907227, "learning_rate": 2.463917525773196e-05, "loss": 6.1936, "step": 732},
    {"epoch": 7.528205128205128, "grad_norm": 0.6537327766418457, "learning_rate": 2.443298969072165e-05, "loss": 6.3321, "step": 734},
    {"epoch": 7.5487179487179485, "grad_norm": 0.7532163858413696, "learning_rate": 2.422680412371134e-05, "loss": 6.263, "step": 736},
    {"epoch": 7.569230769230769, "grad_norm": 0.8065058588981628, "learning_rate": 2.402061855670103e-05, "loss": 6.187, "step": 738},
    {"epoch": 7.589743589743589, "grad_norm": 0.6982044577598572, "learning_rate": 2.3814432989690722e-05, "loss": 6.2071, "step": 740},
    {"epoch": 7.61025641025641, "grad_norm": 0.7518334984779358, "learning_rate": 2.3608247422680412e-05, "loss": 6.1698, "step": 742},
    {"epoch": 7.63076923076923, "grad_norm": 0.6980510950088501, "learning_rate": 2.3402061855670106e-05, "loss": 6.1269, "step": 744},
    {"epoch": 7.651282051282052,
|
"grad_norm": 0.7211715579032898, |
|
"learning_rate": 2.3195876288659796e-05, |
|
"loss": 6.2005, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 7.671794871794872, |
|
"grad_norm": 0.7351675033569336, |
|
"learning_rate": 2.2989690721649483e-05, |
|
"loss": 6.1096, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 7.6923076923076925, |
|
"grad_norm": 0.6142987608909607, |
|
"learning_rate": 2.2783505154639176e-05, |
|
"loss": 6.0628, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 7.712820512820513, |
|
"grad_norm": 0.6700417995452881, |
|
"learning_rate": 2.2577319587628867e-05, |
|
"loss": 6.2528, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 7.733333333333333, |
|
"grad_norm": 0.7297637462615967, |
|
"learning_rate": 2.237113402061856e-05, |
|
"loss": 6.165, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 7.753846153846154, |
|
"grad_norm": 0.6762688755989075, |
|
"learning_rate": 2.2164948453608247e-05, |
|
"loss": 6.3651, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 7.774358974358974, |
|
"grad_norm": 0.6471550464630127, |
|
"learning_rate": 2.1958762886597937e-05, |
|
"loss": 6.142, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 7.794871794871795, |
|
"grad_norm": 0.6905655264854431, |
|
"learning_rate": 2.175257731958763e-05, |
|
"loss": 6.4471, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 7.815384615384615, |
|
"grad_norm": 0.6673563122749329, |
|
"learning_rate": 2.154639175257732e-05, |
|
"loss": 6.0799, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 7.835897435897436, |
|
"grad_norm": 0.7106631994247437, |
|
"learning_rate": 2.134020618556701e-05, |
|
"loss": 6.3385, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 7.856410256410256, |
|
"grad_norm": 0.7296525239944458, |
|
"learning_rate": 2.1134020618556702e-05, |
|
"loss": 6.1772, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 7.876923076923077, |
|
"grad_norm": 0.6923534274101257, |
|
"learning_rate": 2.0927835051546392e-05, |
|
"loss": 6.0845, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 7.897435897435898, |
|
"grad_norm": 0.6479873061180115, |
|
"learning_rate": 2.0721649484536086e-05, |
|
"loss": 6.4237, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 7.917948717948718, |
|
"grad_norm": 0.7043232321739197, |
|
"learning_rate": 2.0515463917525773e-05, |
|
"loss": 6.2389, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 7.938461538461539, |
|
"grad_norm": 0.6955374479293823, |
|
"learning_rate": 2.0309278350515466e-05, |
|
"loss": 6.2594, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 7.958974358974359, |
|
"grad_norm": 0.6248512864112854, |
|
"learning_rate": 2.0103092783505157e-05, |
|
"loss": 6.3424, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 7.97948717948718, |
|
"grad_norm": 0.7696142196655273, |
|
"learning_rate": 1.9896907216494843e-05, |
|
"loss": 6.0404, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 0.7960737347602844, |
|
"learning_rate": 1.9690721649484537e-05, |
|
"loss": 6.2037, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 8.02051282051282, |
|
"grad_norm": 0.764180600643158, |
|
"learning_rate": 1.9484536082474227e-05, |
|
"loss": 6.0193, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 8.04102564102564, |
|
"grad_norm": 0.7745684385299683, |
|
"learning_rate": 1.927835051546392e-05, |
|
"loss": 6.1723, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 8.061538461538461, |
|
"grad_norm": 0.7468089461326599, |
|
"learning_rate": 1.9072164948453608e-05, |
|
"loss": 5.989, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 8.082051282051282, |
|
"grad_norm": 0.7076855301856995, |
|
"learning_rate": 1.8865979381443298e-05, |
|
"loss": 6.2975, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 8.102564102564102, |
|
"grad_norm": 0.750068187713623, |
|
"learning_rate": 1.865979381443299e-05, |
|
"loss": 6.2057, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 8.123076923076923, |
|
"grad_norm": 0.5782350301742554, |
|
"learning_rate": 1.8453608247422682e-05, |
|
"loss": 6.3482, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 8.143589743589743, |
|
"grad_norm": 0.7477689981460571, |
|
"learning_rate": 1.8247422680412372e-05, |
|
"loss": 6.1132, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 8.164102564102564, |
|
"grad_norm": 0.7583786845207214, |
|
"learning_rate": 1.8041237113402062e-05, |
|
"loss": 6.3927, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 8.184615384615384, |
|
"grad_norm": 1.066887378692627, |
|
"learning_rate": 1.7835051546391753e-05, |
|
"loss": 6.3036, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 8.205128205128204, |
|
"grad_norm": 0.7979178428649902, |
|
"learning_rate": 1.7628865979381446e-05, |
|
"loss": 6.0357, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 8.225641025641025, |
|
"grad_norm": 0.7925027012825012, |
|
"learning_rate": 1.7422680412371133e-05, |
|
"loss": 6.1571, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 8.246153846153845, |
|
"grad_norm": 0.6301718950271606, |
|
"learning_rate": 1.7216494845360827e-05, |
|
"loss": 6.2834, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 8.266666666666667, |
|
"grad_norm": 0.8579639196395874, |
|
"learning_rate": 1.7010309278350517e-05, |
|
"loss": 6.1507, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 8.287179487179488, |
|
"grad_norm": 0.6312888264656067, |
|
"learning_rate": 1.6804123711340207e-05, |
|
"loss": 6.4582, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 8.307692307692308, |
|
"grad_norm": 0.7432986497879028, |
|
"learning_rate": 1.6597938144329898e-05, |
|
"loss": 6.2031, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 8.328205128205129, |
|
"grad_norm": 0.6533825993537903, |
|
"learning_rate": 1.6391752577319588e-05, |
|
"loss": 6.2383, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 8.34871794871795, |
|
"grad_norm": 0.7833907604217529, |
|
"learning_rate": 1.618556701030928e-05, |
|
"loss": 6.2307, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 8.36923076923077, |
|
"grad_norm": 0.7582297325134277, |
|
"learning_rate": 1.5979381443298968e-05, |
|
"loss": 6.2063, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 8.38974358974359, |
|
"grad_norm": 0.7262023687362671, |
|
"learning_rate": 1.577319587628866e-05, |
|
"loss": 6.2125, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 8.41025641025641, |
|
"grad_norm": 0.833846926689148, |
|
"learning_rate": 1.5567010309278352e-05, |
|
"loss": 5.9073, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 8.430769230769231, |
|
"grad_norm": 0.6528037190437317, |
|
"learning_rate": 1.5360824742268042e-05, |
|
"loss": 6.1687, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 8.451282051282051, |
|
"grad_norm": 0.715062141418457, |
|
"learning_rate": 1.5154639175257731e-05, |
|
"loss": 6.3858, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 8.471794871794872, |
|
"grad_norm": 0.6965751647949219, |
|
"learning_rate": 1.4948453608247423e-05, |
|
"loss": 6.2247, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 8.492307692307692, |
|
"grad_norm": 0.644185483455658, |
|
"learning_rate": 1.4742268041237115e-05, |
|
"loss": 6.4232, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 8.512820512820513, |
|
"grad_norm": 0.8024539351463318, |
|
"learning_rate": 1.4536082474226805e-05, |
|
"loss": 6.2306, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 8.533333333333333, |
|
"grad_norm": 0.6547759175300598, |
|
"learning_rate": 1.4329896907216495e-05, |
|
"loss": 6.2272, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 8.553846153846154, |
|
"grad_norm": 0.8308131694793701, |
|
"learning_rate": 1.4123711340206186e-05, |
|
"loss": 6.2751, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 8.574358974358974, |
|
"grad_norm": 0.6475526094436646, |
|
"learning_rate": 1.3917525773195878e-05, |
|
"loss": 6.3477, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 8.594871794871795, |
|
"grad_norm": 0.7138957977294922, |
|
"learning_rate": 1.371134020618557e-05, |
|
"loss": 6.2169, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 8.615384615384615, |
|
"grad_norm": 0.7940370440483093, |
|
"learning_rate": 1.3505154639175258e-05, |
|
"loss": 6.0711, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 8.635897435897435, |
|
"grad_norm": 0.710660457611084, |
|
"learning_rate": 1.3298969072164948e-05, |
|
"loss": 6.1328, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 8.656410256410256, |
|
"grad_norm": 0.6740992069244385, |
|
"learning_rate": 1.309278350515464e-05, |
|
"loss": 6.2399, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 8.676923076923076, |
|
"grad_norm": 0.6954624652862549, |
|
"learning_rate": 1.2886597938144329e-05, |
|
"loss": 5.8876, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 8.697435897435897, |
|
"grad_norm": 0.6282210350036621, |
|
"learning_rate": 1.268041237113402e-05, |
|
"loss": 6.2823, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 8.717948717948717, |
|
"grad_norm": 0.6641991138458252, |
|
"learning_rate": 1.2474226804123713e-05, |
|
"loss": 6.0473, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 8.73846153846154, |
|
"grad_norm": 0.9313386678695679, |
|
"learning_rate": 1.2268041237113401e-05, |
|
"loss": 5.9459, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 8.75897435897436, |
|
"grad_norm": 0.6434590816497803, |
|
"learning_rate": 1.2061855670103093e-05, |
|
"loss": 6.2093, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 8.77948717948718, |
|
"grad_norm": 0.7539594173431396, |
|
"learning_rate": 1.1855670103092783e-05, |
|
"loss": 6.1687, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 0.7946255803108215, |
|
"learning_rate": 1.1649484536082475e-05, |
|
"loss": 6.1376, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 8.820512820512821, |
|
"grad_norm": 0.7640373706817627, |
|
"learning_rate": 1.1443298969072166e-05, |
|
"loss": 6.1806, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 8.841025641025642, |
|
"grad_norm": 0.6656478643417358, |
|
"learning_rate": 1.1237113402061856e-05, |
|
"loss": 6.0972, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 8.861538461538462, |
|
"grad_norm": 0.7396649718284607, |
|
"learning_rate": 1.1030927835051546e-05, |
|
"loss": 6.0908, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 8.882051282051282, |
|
"grad_norm": 0.6619882583618164, |
|
"learning_rate": 1.0824742268041238e-05, |
|
"loss": 6.4136, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 8.902564102564103, |
|
"grad_norm": 0.7462313175201416, |
|
"learning_rate": 1.0618556701030928e-05, |
|
"loss": 6.0462, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 8.923076923076923, |
|
"grad_norm": 1.2745590209960938, |
|
"learning_rate": 1.041237113402062e-05, |
|
"loss": 6.0757, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 8.943589743589744, |
|
"grad_norm": 0.7905880212783813, |
|
"learning_rate": 1.0206185567010309e-05, |
|
"loss": 5.9552, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 8.964102564102564, |
|
"grad_norm": 0.7253865003585815, |
|
"learning_rate": 1e-05, |
|
"loss": 6.2264, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 8.984615384615385, |
|
"grad_norm": 0.7785334587097168, |
|
"learning_rate": 9.793814432989691e-06, |
|
"loss": 6.195, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 9.005128205128205, |
|
"grad_norm": 0.8180606961250305, |
|
"learning_rate": 9.587628865979383e-06, |
|
"loss": 5.991, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 9.025641025641026, |
|
"grad_norm": 0.7430230379104614, |
|
"learning_rate": 9.381443298969073e-06, |
|
"loss": 5.9541, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 9.046153846153846, |
|
"grad_norm": 0.6766589879989624, |
|
"learning_rate": 9.175257731958762e-06, |
|
"loss": 6.2283, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 9.066666666666666, |
|
"grad_norm": 0.7492343187332153, |
|
"learning_rate": 8.969072164948454e-06, |
|
"loss": 6.1433, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 9.087179487179487, |
|
"grad_norm": 0.787506103515625, |
|
"learning_rate": 8.762886597938144e-06, |
|
"loss": 5.7677, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 9.107692307692307, |
|
"grad_norm": 0.6319190859794617, |
|
"learning_rate": 8.556701030927836e-06, |
|
"loss": 6.2569, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 9.128205128205128, |
|
"grad_norm": 0.5653165578842163, |
|
"learning_rate": 8.350515463917526e-06, |
|
"loss": 6.3534, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 9.148717948717948, |
|
"grad_norm": 1.010986566543579, |
|
"learning_rate": 8.144329896907216e-06, |
|
"loss": 6.2486, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 9.169230769230769, |
|
"grad_norm": 0.7131024599075317, |
|
"learning_rate": 7.938144329896907e-06, |
|
"loss": 6.0887, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 9.189743589743589, |
|
"grad_norm": 0.7308672666549683, |
|
"learning_rate": 7.731958762886599e-06, |
|
"loss": 6.1484, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 9.21025641025641, |
|
"grad_norm": 0.7401643991470337, |
|
"learning_rate": 7.525773195876289e-06, |
|
"loss": 6.1046, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 9.23076923076923, |
|
"grad_norm": 0.7252262830734253, |
|
"learning_rate": 7.31958762886598e-06, |
|
"loss": 6.2832, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 9.25128205128205, |
|
"grad_norm": 0.7304520606994629, |
|
"learning_rate": 7.11340206185567e-06, |
|
"loss": 6.2027, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 9.271794871794873, |
|
"grad_norm": 0.6221430897712708, |
|
"learning_rate": 6.907216494845361e-06, |
|
"loss": 6.3802, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 9.292307692307693, |
|
"grad_norm": 0.7544053792953491, |
|
"learning_rate": 6.701030927835052e-06, |
|
"loss": 6.1048, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 9.312820512820513, |
|
"grad_norm": 0.6470032930374146, |
|
"learning_rate": 6.494845360824743e-06, |
|
"loss": 6.3017, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 9.333333333333334, |
|
"grad_norm": 0.5889255404472351, |
|
"learning_rate": 6.288659793814433e-06, |
|
"loss": 6.2106, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 9.353846153846154, |
|
"grad_norm": 0.6223869323730469, |
|
"learning_rate": 6.082474226804124e-06, |
|
"loss": 6.1163, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 9.374358974358975, |
|
"grad_norm": 0.6783230304718018, |
|
"learning_rate": 5.876288659793814e-06, |
|
"loss": 6.1365, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 9.394871794871795, |
|
"grad_norm": 0.7757629752159119, |
|
"learning_rate": 5.670103092783505e-06, |
|
"loss": 6.0445, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 9.415384615384616, |
|
"grad_norm": 0.7045403122901917, |
|
"learning_rate": 5.4639175257731965e-06, |
|
"loss": 6.3132, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 9.435897435897436, |
|
"grad_norm": 0.7770035862922668, |
|
"learning_rate": 5.257731958762887e-06, |
|
"loss": 6.0804, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 9.456410256410257, |
|
"grad_norm": 0.6999940276145935, |
|
"learning_rate": 5.051546391752578e-06, |
|
"loss": 6.0772, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 9.476923076923077, |
|
"grad_norm": 0.7113653421401978, |
|
"learning_rate": 4.845360824742268e-06, |
|
"loss": 5.9905, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 9.497435897435897, |
|
"grad_norm": 0.6142096519470215, |
|
"learning_rate": 4.639175257731959e-06, |
|
"loss": 6.2268, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 9.517948717948718, |
|
"grad_norm": 0.6630533933639526, |
|
"learning_rate": 4.43298969072165e-06, |
|
"loss": 6.3161, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 9.538461538461538, |
|
"grad_norm": 0.8330481052398682, |
|
"learning_rate": 4.2268041237113405e-06, |
|
"loss": 6.1675, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 9.558974358974359, |
|
"grad_norm": 0.6822926998138428, |
|
"learning_rate": 4.020618556701032e-06, |
|
"loss": 6.106, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 9.57948717948718, |
|
"grad_norm": 0.7913044095039368, |
|
"learning_rate": 3.8144329896907223e-06, |
|
"loss": 6.2236, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"grad_norm": 0.8586515188217163, |
|
"learning_rate": 3.608247422680412e-06, |
|
"loss": 6.0912, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 9.62051282051282, |
|
"grad_norm": 0.6389515399932861, |
|
"learning_rate": 3.402061855670103e-06, |
|
"loss": 6.1972, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 9.64102564102564, |
|
"grad_norm": 0.647665798664093, |
|
"learning_rate": 3.195876288659794e-06, |
|
"loss": 6.462, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 9.661538461538461, |
|
"grad_norm": 0.7906177043914795, |
|
"learning_rate": 2.989690721649485e-06, |
|
"loss": 6.1017, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 9.682051282051281, |
|
"grad_norm": 0.7301990985870361, |
|
"learning_rate": 2.7835051546391757e-06, |
|
"loss": 6.1658, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 9.702564102564102, |
|
"grad_norm": 0.676216185092926, |
|
"learning_rate": 2.577319587628866e-06, |
|
"loss": 6.3423, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 9.723076923076922, |
|
"grad_norm": 0.7950716614723206, |
|
"learning_rate": 2.3711340206185566e-06, |
|
"loss": 6.1037, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 9.743589743589745, |
|
"grad_norm": 0.6446979641914368, |
|
"learning_rate": 2.1649484536082473e-06, |
|
"loss": 6.245, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 9.764102564102565, |
|
"grad_norm": 0.7083817720413208, |
|
"learning_rate": 1.9587628865979384e-06, |
|
"loss": 6.1738, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 9.784615384615385, |
|
"grad_norm": 0.611316442489624, |
|
"learning_rate": 1.752577319587629e-06, |
|
"loss": 6.3228, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 9.805128205128206, |
|
"grad_norm": 0.7233178019523621, |
|
"learning_rate": 1.5463917525773197e-06, |
|
"loss": 5.7474, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 9.825641025641026, |
|
"grad_norm": 0.6572598814964294, |
|
"learning_rate": 1.3402061855670102e-06, |
|
"loss": 6.245, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 9.846153846153847, |
|
"grad_norm": 0.7650203108787537, |
|
"learning_rate": 1.134020618556701e-06, |
|
"loss": 6.2761, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 9.866666666666667, |
|
"grad_norm": 0.780173122882843, |
|
"learning_rate": 9.278350515463918e-07, |
|
"loss": 6.2113, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 9.887179487179488, |
|
"grad_norm": 0.7613905668258667, |
|
"learning_rate": 7.216494845360825e-07, |
|
"loss": 6.106, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 9.907692307692308, |
|
"grad_norm": 0.8180409669876099, |
|
"learning_rate": 5.154639175257732e-07, |
|
"loss": 6.1713, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 9.928205128205128, |
|
"grad_norm": 0.6750867366790771, |
|
"learning_rate": 3.0927835051546394e-07, |
|
"loss": 6.1835, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 9.948717948717949, |
|
"grad_norm": 0.7007511854171753, |
|
"learning_rate": 1.0309278350515465e-07, |
|
"loss": 6.2497, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 9.948717948717949, |
|
"step": 970, |
|
"total_flos": 1620553852147860.0, |
|
"train_loss": 6.414023149628, |
|
"train_runtime": 12031.1209, |
|
"train_samples_per_second": 1.296, |
|
"train_steps_per_second": 0.081 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 970, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1620553852147860.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |