|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9999153188246253, |
|
"eval_steps": 500, |
|
"global_step": 2952, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 3.378378378378379e-07, |
|
"loss": 2.6105, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 1.6891891891891894e-06, |
|
"loss": 2.5818, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 3.3783783783783788e-06, |
|
"loss": 2.5823, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 5.067567567567568e-06, |
|
"loss": 2.5736, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 6.7567567567567575e-06, |
|
"loss": 2.5517, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.445945945945946e-06, |
|
"loss": 2.5821, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 1.0135135135135136e-05, |
|
"loss": 2.526, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 1.1824324324324325e-05, |
|
"loss": 2.5476, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 1.3513513513513515e-05, |
|
"loss": 2.492, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 1.5202702702702704e-05, |
|
"loss": 2.5255, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 1.6891891891891892e-05, |
|
"loss": 2.4882, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2412109375, |
|
"learning_rate": 1.8581081081081082e-05, |
|
"loss": 2.4763, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2138671875, |
|
"learning_rate": 2.0270270270270273e-05, |
|
"loss": 2.4817, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.193359375, |
|
"learning_rate": 2.195945945945946e-05, |
|
"loss": 2.491, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 2.364864864864865e-05, |
|
"loss": 2.4636, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.216796875, |
|
"learning_rate": 2.533783783783784e-05, |
|
"loss": 2.4359, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2099609375, |
|
"learning_rate": 2.702702702702703e-05, |
|
"loss": 2.4172, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 2.8716216216216217e-05, |
|
"loss": 2.395, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 3.0405405405405407e-05, |
|
"loss": 2.3687, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 3.20945945945946e-05, |
|
"loss": 2.3712, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 3.3783783783783784e-05, |
|
"loss": 2.3344, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 3.547297297297297e-05, |
|
"loss": 2.3406, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.18359375, |
|
"learning_rate": 3.7162162162162165e-05, |
|
"loss": 2.286, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 3.885135135135135e-05, |
|
"loss": 2.2872, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 4.0540540540540545e-05, |
|
"loss": 2.3243, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1357421875, |
|
"learning_rate": 4.222972972972973e-05, |
|
"loss": 2.2734, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.13671875, |
|
"learning_rate": 4.391891891891892e-05, |
|
"loss": 2.2828, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1376953125, |
|
"learning_rate": 4.560810810810811e-05, |
|
"loss": 2.2821, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1328125, |
|
"learning_rate": 4.72972972972973e-05, |
|
"loss": 2.2867, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 4.8986486486486486e-05, |
|
"loss": 2.2866, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 5.067567567567568e-05, |
|
"loss": 2.2754, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1357421875, |
|
"learning_rate": 5.2364864864864873e-05, |
|
"loss": 2.2497, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.13671875, |
|
"learning_rate": 5.405405405405406e-05, |
|
"loss": 2.2668, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.1318359375, |
|
"learning_rate": 5.574324324324325e-05, |
|
"loss": 2.2712, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 5.7432432432432434e-05, |
|
"loss": 2.2803, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 5.912162162162163e-05, |
|
"loss": 2.275, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.138671875, |
|
"learning_rate": 6.0810810810810814e-05, |
|
"loss": 2.2391, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.13671875, |
|
"learning_rate": 6.25e-05, |
|
"loss": 2.2509, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.1416015625, |
|
"learning_rate": 6.41891891891892e-05, |
|
"loss": 2.2538, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 6.587837837837837e-05, |
|
"loss": 2.2511, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.1455078125, |
|
"learning_rate": 6.756756756756757e-05, |
|
"loss": 2.2711, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.13671875, |
|
"learning_rate": 6.925675675675676e-05, |
|
"loss": 2.2703, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.1328125, |
|
"learning_rate": 7.094594594594594e-05, |
|
"loss": 2.2641, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.1484375, |
|
"learning_rate": 7.263513513513514e-05, |
|
"loss": 2.2312, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.1484375, |
|
"learning_rate": 7.432432432432433e-05, |
|
"loss": 2.2609, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1435546875, |
|
"learning_rate": 7.601351351351351e-05, |
|
"loss": 2.2292, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1416015625, |
|
"learning_rate": 7.77027027027027e-05, |
|
"loss": 2.2355, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.15234375, |
|
"learning_rate": 7.93918918918919e-05, |
|
"loss": 2.2207, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1474609375, |
|
"learning_rate": 8.108108108108109e-05, |
|
"loss": 2.2336, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 8.277027027027028e-05, |
|
"loss": 2.2604, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 8.445945945945946e-05, |
|
"loss": 2.2512, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1552734375, |
|
"learning_rate": 8.614864864864866e-05, |
|
"loss": 2.226, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 8.783783783783784e-05, |
|
"loss": 2.2042, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 8.952702702702703e-05, |
|
"loss": 2.224, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.121621621621623e-05, |
|
"loss": 2.224, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.29054054054054e-05, |
|
"loss": 2.2425, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1513671875, |
|
"learning_rate": 9.45945945945946e-05, |
|
"loss": 2.231, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 9.628378378378379e-05, |
|
"loss": 2.2299, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.154296875, |
|
"learning_rate": 9.797297297297297e-05, |
|
"loss": 2.2135, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.966216216216217e-05, |
|
"loss": 2.2346, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.999944036768366e-05, |
|
"loss": 2.2271, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1533203125, |
|
"learning_rate": 9.999716688286903e-05, |
|
"loss": 2.2334, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 9.999314464799477e-05, |
|
"loss": 2.2153, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 9.998737380374656e-05, |
|
"loss": 2.2373, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.997985455197114e-05, |
|
"loss": 2.2456, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 9.99705871556692e-05, |
|
"loss": 2.2055, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.995957193898633e-05, |
|
"loss": 2.1951, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.994680928720159e-05, |
|
"loss": 2.2664, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1533203125, |
|
"learning_rate": 9.993229964671401e-05, |
|
"loss": 2.2517, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.991604352502706e-05, |
|
"loss": 2.2064, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1552734375, |
|
"learning_rate": 9.989804149073081e-05, |
|
"loss": 2.2151, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1552734375, |
|
"learning_rate": 9.987829417348213e-05, |
|
"loss": 2.23, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.15234375, |
|
"learning_rate": 9.985680226398261e-05, |
|
"loss": 2.2422, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.983356651395436e-05, |
|
"loss": 2.2123, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.980858773611388e-05, |
|
"loss": 2.2226, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.154296875, |
|
"learning_rate": 9.97818668041434e-05, |
|
"loss": 2.2272, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.975340465266053e-05, |
|
"loss": 2.2006, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.972320227718546e-05, |
|
"loss": 2.2035, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.154296875, |
|
"learning_rate": 9.969126073410617e-05, |
|
"loss": 2.2141, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.965758114064147e-05, |
|
"loss": 2.204, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.962216467480193e-05, |
|
"loss": 2.2051, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.958501257534866e-05, |
|
"loss": 2.2387, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.954612614175003e-05, |
|
"loss": 2.2191, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.950550673413617e-05, |
|
"loss": 2.2016, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.94631557732514e-05, |
|
"loss": 2.2261, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.941907474040458e-05, |
|
"loss": 2.1897, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.937326517741724e-05, |
|
"loss": 2.2203, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.932572868656969e-05, |
|
"loss": 2.194, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.927646693054496e-05, |
|
"loss": 2.2442, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.922548163237066e-05, |
|
"loss": 2.1735, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 9.917277457535872e-05, |
|
"loss": 2.1946, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1533203125, |
|
"learning_rate": 9.911834760304294e-05, |
|
"loss": 2.2291, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.906220261911465e-05, |
|
"loss": 2.1984, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.900434158735598e-05, |
|
"loss": 2.2231, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.894476653157126e-05, |
|
"loss": 2.1912, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 9.88834795355162e-05, |
|
"loss": 2.2118, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.882048274282505e-05, |
|
"loss": 2.2189, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 9.875577835693554e-05, |
|
"loss": 2.2098, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.868936864101188e-05, |
|
"loss": 2.2567, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.86212559178656e-05, |
|
"loss": 2.2096, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 9.855144256987423e-05, |
|
"loss": 2.2169, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.1533203125, |
|
"learning_rate": 9.84799310388981e-05, |
|
"loss": 2.206, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.840672382619478e-05, |
|
"loss": 2.2387, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.833182349233174e-05, |
|
"loss": 2.1945, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.825523265709666e-05, |
|
"loss": 2.2079, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.817695399940592e-05, |
|
"loss": 2.2095, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.80969902572108e-05, |
|
"loss": 2.2254, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.801534422740173e-05, |
|
"loss": 2.2193, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 9.793201876571053e-05, |
|
"loss": 2.2287, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.784701678661045e-05, |
|
"loss": 2.2142, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.77603412632143e-05, |
|
"loss": 2.1981, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.767199522717036e-05, |
|
"loss": 2.1763, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 9.758198176855648e-05, |
|
"loss": 2.1829, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.749030403577184e-05, |
|
"loss": 2.1872, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.739696523542696e-05, |
|
"loss": 2.2023, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 9.73019686322315e-05, |
|
"loss": 2.1966, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 9.720531754888e-05, |
|
"loss": 2.2187, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.710701536593581e-05, |
|
"loss": 2.2016, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.700706552171268e-05, |
|
"loss": 2.2113, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.690547151215463e-05, |
|
"loss": 2.1862, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.680223689071364e-05, |
|
"loss": 2.1791, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.669736526822528e-05, |
|
"loss": 2.2076, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.659086031278254e-05, |
|
"loss": 2.2267, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.648272574960744e-05, |
|
"loss": 2.1674, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.637296536092075e-05, |
|
"loss": 2.1736, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.626158298580973e-05, |
|
"loss": 2.196, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.614858252009385e-05, |
|
"loss": 2.1903, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.603396791618844e-05, |
|
"loss": 2.1976, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.591774318296661e-05, |
|
"loss": 2.1985, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.154296875, |
|
"learning_rate": 9.579991238561887e-05, |
|
"loss": 2.1934, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.568047964551102e-05, |
|
"loss": 2.1879, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.555944914003998e-05, |
|
"loss": 2.1918, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.54368251024877e-05, |
|
"loss": 2.2127, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.531261182187308e-05, |
|
"loss": 2.1711, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.51868136428019e-05, |
|
"loss": 2.2108, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.150390625, |
|
"learning_rate": 9.505943496531496e-05, |
|
"loss": 2.2295, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.493048024473412e-05, |
|
"loss": 2.1847, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.479995399150644e-05, |
|
"loss": 2.1936, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 9.466786077104646e-05, |
|
"loss": 2.1942, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.453420520357652e-05, |
|
"loss": 2.1972, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.15234375, |
|
"learning_rate": 9.439899196396515e-05, |
|
"loss": 2.1822, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1572265625, |
|
"learning_rate": 9.426222578156356e-05, |
|
"loss": 2.2155, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 9.412391144004017e-05, |
|
"loss": 2.1891, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.398405377721338e-05, |
|
"loss": 2.1806, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.384265768488225e-05, |
|
"loss": 2.1999, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.369972810865557e-05, |
|
"loss": 2.224, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.355527004777868e-05, |
|
"loss": 2.2033, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.340928855495872e-05, |
|
"loss": 2.1892, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.32617887361879e-05, |
|
"loss": 2.2023, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 9.311277575056489e-05, |
|
"loss": 2.2089, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.296225481011436e-05, |
|
"loss": 2.1764, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.281023117960468e-05, |
|
"loss": 2.2349, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 9.265671017636383e-05, |
|
"loss": 2.2008, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.250169717009334e-05, |
|
"loss": 2.1711, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.234519758268049e-05, |
|
"loss": 2.2002, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.218721688800868e-05, |
|
"loss": 2.184, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.202776061176605e-05, |
|
"loss": 2.1849, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 9.186683433125203e-05, |
|
"loss": 2.1952, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.170444367518241e-05, |
|
"loss": 2.2033, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 9.154059432349245e-05, |
|
"loss": 2.191, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 9.13752920071381e-05, |
|
"loss": 2.1984, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 9.120854250789573e-05, |
|
"loss": 2.2054, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.104035165815971e-05, |
|
"loss": 2.2218, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 9.087072534073859e-05, |
|
"loss": 2.2102, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.069966948864916e-05, |
|
"loss": 2.2294, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 9.052719008490909e-05, |
|
"loss": 2.203, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 9.035329316232755e-05, |
|
"loss": 2.1868, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 9.017798480329427e-05, |
|
"loss": 2.1755, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 9.000127113956674e-05, |
|
"loss": 2.2099, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1552734375, |
|
"learning_rate": 8.982315835205578e-05, |
|
"loss": 2.2155, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 8.964365267060935e-05, |
|
"loss": 2.2156, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 8.946276037379467e-05, |
|
"loss": 2.1751, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 8.928048778867848e-05, |
|
"loss": 2.1603, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 8.909684129060593e-05, |
|
"loss": 2.179, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 8.89118273029775e-05, |
|
"loss": 2.1944, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 8.872545229702426e-05, |
|
"loss": 2.214, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 8.853772279158166e-05, |
|
"loss": 2.1941, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 8.834864535286143e-05, |
|
"loss": 2.168, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 8.815822659422195e-05, |
|
"loss": 2.206, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 8.796647317593691e-05, |
|
"loss": 2.2074, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 8.777339180496238e-05, |
|
"loss": 2.19, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 8.757898923470218e-05, |
|
"loss": 2.2011, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.738327226477176e-05, |
|
"loss": 2.1937, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.158203125, |
|
"learning_rate": 8.718624774076023e-05, |
|
"loss": 2.1904, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 8.698792255399104e-05, |
|
"loss": 2.1914, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 8.67883036412809e-05, |
|
"loss": 2.1953, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 8.658739798469712e-05, |
|
"loss": 2.1928, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.638521261131349e-05, |
|
"loss": 2.2242, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 8.618175459296433e-05, |
|
"loss": 2.1607, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 8.597703104599736e-05, |
|
"loss": 2.2007, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 8.577104913102458e-05, |
|
"loss": 2.1936, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 8.556381605267196e-05, |
|
"loss": 2.1839, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 2.1786, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 8.51456254428871e-05, |
|
"loss": 2.1859, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.49346825385007e-05, |
|
"loss": 2.2096, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.472251772431461e-05, |
|
"loss": 2.2057, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 8.450913842121396e-05, |
|
"loss": 2.1645, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 8.429455209256297e-05, |
|
"loss": 2.1972, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.407876624394406e-05, |
|
"loss": 2.194, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 8.38617884228952e-05, |
|
"loss": 2.1823, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 8.364362621864595e-05, |
|
"loss": 2.2122, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 8.342428726185205e-05, |
|
"loss": 2.2262, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 8.320377922432848e-05, |
|
"loss": 2.1803, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 8.298210981878112e-05, |
|
"loss": 2.182, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 8.275928679853703e-05, |
|
"loss": 2.1982, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 8.253531795727319e-05, |
|
"loss": 2.1612, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 8.231021112874402e-05, |
|
"loss": 2.1983, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 8.20839741865072e-05, |
|
"loss": 2.1974, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 8.185661504364844e-05, |
|
"loss": 2.1986, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 8.162814165250464e-05, |
|
"loss": 2.1974, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 8.139856200438575e-05, |
|
"loss": 2.1886, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 8.11678841292952e-05, |
|
"loss": 2.2085, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 8.093611609564913e-05, |
|
"loss": 2.2038, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 8.070326600999416e-05, |
|
"loss": 2.2125, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 8.046934201672376e-05, |
|
"loss": 2.1787, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 8.023435229779351e-05, |
|
"loss": 2.2398, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 7.999830507243478e-05, |
|
"loss": 2.1843, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 7.976120859686744e-05, |
|
"loss": 2.1823, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 7.952307116401086e-05, |
|
"loss": 2.1992, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 7.9283901103194e-05, |
|
"loss": 2.1805, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 7.904370677986404e-05, |
|
"loss": 2.2111, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 7.880249659529376e-05, |
|
"loss": 2.1732, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 7.85602789862877e-05, |
|
"loss": 2.1761, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 7.831706242488708e-05, |
|
"loss": 2.1856, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 7.80728554180734e-05, |
|
"loss": 2.1798, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 7.782766650747108e-05, |
|
"loss": 2.1974, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 7.758150426904845e-05, |
|
"loss": 2.1742, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 7.733437731281797e-05, |
|
"loss": 2.1869, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 7.708629428253497e-05, |
|
"loss": 2.1808, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 7.683726385539544e-05, |
|
"loss": 2.2019, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 7.658729474173241e-05, |
|
"loss": 2.2051, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 7.63363956847113e-05, |
|
"loss": 2.1951, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 7.608457546002424e-05, |
|
"loss": 2.2108, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 7.58318428755829e-05, |
|
"loss": 2.1563, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 7.557820677121067e-05, |
|
"loss": 2.1778, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.16015625, |
|
"learning_rate": 7.532367601833321e-05, |
|
"loss": 2.2198, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 7.506825951966843e-05, |
|
"loss": 2.1818, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 7.481196620891482e-05, |
|
"loss": 2.1774, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 7.45548050504392e-05, |
|
"loss": 2.167, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 7.429678503896304e-05, |
|
"loss": 2.153, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 7.403791519924794e-05, |
|
"loss": 2.1921, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 7.377820458577987e-05, |
|
"loss": 2.1914, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 7.351766228245259e-05, |
|
"loss": 2.1962, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 7.325629740224979e-05, |
|
"loss": 2.216, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 7.299411908692649e-05, |
|
"loss": 2.1387, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 7.273113650668919e-05, |
|
"loss": 2.1843, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 7.246735885987515e-05, |
|
"loss": 2.1779, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 7.220279537263063e-05, |
|
"loss": 2.2041, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 7.193745529858826e-05, |
|
"loss": 2.1866, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 7.167134791854333e-05, |
|
"loss": 2.1654, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 7.140448254012912e-05, |
|
"loss": 2.1932, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 7.113686849749153e-05, |
|
"loss": 2.1708, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 7.086851515096233e-05, |
|
"loss": 2.1748, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 7.0599431886732e-05, |
|
"loss": 2.1981, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 7.032962811652133e-05, |
|
"loss": 2.1723, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 7.005911327725222e-05, |
|
"loss": 2.1849, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 6.97878968307176e-05, |
|
"loss": 2.1794, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 6.951598826325056e-05, |
|
"loss": 2.1861, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 6.924339708539244e-05, |
|
"loss": 2.1937, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1591796875, |
|
"learning_rate": 6.897013283156026e-05, |
|
"loss": 2.1785, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 6.869620505971321e-05, |
|
"loss": 2.1728, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 6.842162335101829e-05, |
|
"loss": 2.2097, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 6.814639730951532e-05, |
|
"loss": 2.166, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 6.787053656178087e-05, |
|
"loss": 2.1783, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 6.759405075659166e-05, |
|
"loss": 2.2017, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 6.731694956458701e-05, |
|
"loss": 2.1601, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 6.703924267793061e-05, |
|
"loss": 2.1962, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 6.676093980997155e-05, |
|
"loss": 2.1764, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 6.648205069490451e-05, |
|
"loss": 2.2019, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.620258508742935e-05, |
|
"loss": 2.205, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 6.592255276240994e-05, |
|
"loss": 2.162, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 6.564196351453209e-05, |
|
"loss": 2.1992, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 6.536082715796125e-05, |
|
"loss": 2.1835, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 6.507915352599895e-05, |
|
"loss": 2.1851, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 6.479695247073907e-05, |
|
"loss": 2.2148, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 6.451423386272312e-05, |
|
"loss": 2.1823, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 6.423100759059509e-05, |
|
"loss": 2.1676, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 6.394728356075551e-05, |
|
"loss": 2.1841, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 6.366307169701496e-05, |
|
"loss": 2.1638, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 6.337838194024697e-05, |
|
"loss": 2.1984, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 6.309322424804034e-05, |
|
"loss": 2.1869, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 6.280760859435087e-05, |
|
"loss": 2.1684, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 6.252154496915244e-05, |
|
"loss": 2.1879, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.223504337808761e-05, |
|
"loss": 2.1625, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 6.194811384211768e-05, |
|
"loss": 2.1731, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 6.166076639717218e-05, |
|
"loss": 2.1673, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.137301109379783e-05, |
|
"loss": 2.1944, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 6.108485799680701e-05, |
|
"loss": 2.1989, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 6.079631718492569e-05, |
|
"loss": 2.163, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.050739875044098e-05, |
|
"loss": 2.1824, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 6.021811279884807e-05, |
|
"loss": 2.193, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 5.992846944849679e-05, |
|
"loss": 2.196, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.96384788302377e-05, |
|
"loss": 2.1575, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 5.934815108706775e-05, |
|
"loss": 2.2159, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 5.905749637377549e-05, |
|
"loss": 2.1838, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 5.87665248565859e-05, |
|
"loss": 2.2138, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 5.847524671280484e-05, |
|
"loss": 2.1744, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 5.818367213046298e-05, |
|
"loss": 2.1914, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 5.7891811307959574e-05, |
|
"loss": 2.1712, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.759967445370564e-05, |
|
"loss": 2.161, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 5.7307271785767034e-05, |
|
"loss": 2.1811, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 5.701461353150687e-05, |
|
"loss": 2.1932, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 5.6721709927227974e-05, |
|
"loss": 2.1521, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 5.642857121781475e-05, |
|
"loss": 2.1954, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 5.613520765637489e-05, |
|
"loss": 2.169, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.58416295038807e-05, |
|
"loss": 2.2079, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.554784702881025e-05, |
|
"loss": 2.1636, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 5.525387050678819e-05, |
|
"loss": 2.1689, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 5.495971022022638e-05, |
|
"loss": 2.179, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 5.466537645796416e-05, |
|
"loss": 2.195, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 5.437087951490856e-05, |
|
"loss": 2.1827, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1787109375, |
|
"learning_rate": 5.4076229691674164e-05, |
|
"loss": 2.1747, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 5.3781437294222845e-05, |
|
"loss": 2.179, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 5.3486512633503303e-05, |
|
"loss": 2.219, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 5.319146602509042e-05, |
|
"loss": 2.198, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 5.289630778882442e-05, |
|
"loss": 2.1706, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.260104824844989e-05, |
|
"loss": 2.1725, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.230569773125484e-05, |
|
"loss": 2.1811, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 5.201026656770926e-05, |
|
"loss": 2.1745, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.1714765091104003e-05, |
|
"loss": 2.1982, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 5.141920363718916e-05, |
|
"loss": 2.1957, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 5.1123592543812734e-05, |
|
"loss": 2.1758, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.082794215055894e-05, |
|
"loss": 2.1576, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.0532262798386544e-05, |
|
"loss": 2.1923, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 5.023656482926727e-05, |
|
"loss": 2.1893, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 4.994085858582397e-05, |
|
"loss": 2.163, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 4.964515441096889e-05, |
|
"loss": 2.1753, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.934946264754199e-05, |
|
"loss": 2.1831, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.9053793637949067e-05, |
|
"loss": 2.1525, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 4.875815772380002e-05, |
|
"loss": 2.1687, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1640625, |
|
"learning_rate": 4.846256524554725e-05, |
|
"loss": 2.1775, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 4.8167026542123874e-05, |
|
"loss": 2.1846, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.78715519505821e-05, |
|
"loss": 2.1507, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 4.7576151805731695e-05, |
|
"loss": 2.2013, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 4.728083643977855e-05, |
|
"loss": 2.1392, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 4.698561618196323e-05, |
|
"loss": 2.184, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.162109375, |
|
"learning_rate": 4.669050135819966e-05, |
|
"loss": 2.1987, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.639550229071407e-05, |
|
"loss": 2.1599, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 4.610062929768383e-05, |
|
"loss": 2.1722, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 4.580589269287661e-05, |
|
"loss": 2.1756, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 4.5511302785289685e-05, |
|
"loss": 2.1929, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 4.521686987878925e-05, |
|
"loss": 2.1731, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.492260427175007e-05, |
|
"loss": 2.182, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 4.4628516256695305e-05, |
|
"loss": 2.1768, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 4.433461611993651e-05, |
|
"loss": 2.162, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 4.4040914141213774e-05, |
|
"loss": 2.1815, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 4.374742059333621e-05, |
|
"loss": 2.1603, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 4.345414574182272e-05, |
|
"loss": 2.1844, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 4.316109984454278e-05, |
|
"loss": 2.1923, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 4.2868293151357806e-05, |
|
"loss": 2.2025, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 4.2575735903762513e-05, |
|
"loss": 2.1585, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 4.228343833452684e-05, |
|
"loss": 2.1745, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 4.1991410667337896e-05, |
|
"loss": 2.2175, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 4.1699663116442434e-05, |
|
"loss": 2.1835, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 4.140820588628964e-05, |
|
"loss": 2.1859, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 4.1117049171174104e-05, |
|
"loss": 2.1912, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 4.082620315487931e-05, |
|
"loss": 2.1961, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 4.053567801032144e-05, |
|
"loss": 2.1623, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 4.0245483899193595e-05, |
|
"loss": 2.1723, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 3.995563097161026e-05, |
|
"loss": 2.1777, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.966612936575235e-05, |
|
"loss": 2.1937, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 3.937698920751268e-05, |
|
"loss": 2.184, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.9088220610141655e-05, |
|
"loss": 2.181, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 3.87998336738936e-05, |
|
"loss": 2.1893, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 3.851183848567351e-05, |
|
"loss": 2.175, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 3.822424511868421e-05, |
|
"loss": 2.1897, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 3.7937063632074036e-05, |
|
"loss": 2.16, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.7650304070584955e-05, |
|
"loss": 2.1899, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.736397646420135e-05, |
|
"loss": 2.1879, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 3.7078090827799e-05, |
|
"loss": 2.1832, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.679265716079501e-05, |
|
"loss": 2.1894, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 3.650768544679788e-05, |
|
"loss": 2.1831, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.622318565325847e-05, |
|
"loss": 2.1538, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 3.593916773112122e-05, |
|
"loss": 2.1717, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 3.565564161447617e-05, |
|
"loss": 2.187, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 3.5372617220211525e-05, |
|
"loss": 2.1609, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 3.509010444766674e-05, |
|
"loss": 2.1721, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 3.480811317828625e-05, |
|
"loss": 2.162, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.452665327527391e-05, |
|
"loss": 2.202, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 3.4245734583248e-05, |
|
"loss": 2.1657, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 3.3965366927896864e-05, |
|
"loss": 2.1612, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.3685560115635195e-05, |
|
"loss": 2.1608, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 3.340632393326118e-05, |
|
"loss": 2.1767, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.3127668147614e-05, |
|
"loss": 2.1851, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 3.284960250523237e-05, |
|
"loss": 2.1778, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 3.2572136732013555e-05, |
|
"loss": 2.1723, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 3.2295280532873226e-05, |
|
"loss": 2.1986, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 3.2019043591405936e-05, |
|
"loss": 2.1746, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.174343556954652e-05, |
|
"loss": 2.1711, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.146846610723212e-05, |
|
"loss": 2.1924, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 3.1194144822064944e-05, |
|
"loss": 2.2043, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1630859375, |
|
"learning_rate": 3.0920481308975926e-05, |
|
"loss": 2.1798, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 3.0647485139889145e-05, |
|
"loss": 2.1784, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1826171875, |
|
"learning_rate": 3.037516586338699e-05, |
|
"loss": 2.2003, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 3.0103533004376183e-05, |
|
"loss": 2.2178, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 2.9832596063754613e-05, |
|
"loss": 2.1773, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 2.9562364518079105e-05, |
|
"loss": 2.2017, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.929284781923382e-05, |
|
"loss": 2.1396, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 2.902405539409978e-05, |
|
"loss": 2.1828, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.8755996644225097e-05, |
|
"loss": 2.1618, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.848868094549615e-05, |
|
"loss": 2.1648, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 2.8222117647809553e-05, |
|
"loss": 2.1717, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 2.7956316074745293e-05, |
|
"loss": 2.1584, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 2.7691285523240474e-05, |
|
"loss": 2.1671, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.7427035263264222e-05, |
|
"loss": 2.1939, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 2.7163574537493407e-05, |
|
"loss": 2.158, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.690091256098936e-05, |
|
"loss": 2.1482, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 2.6639058520875615e-05, |
|
"loss": 2.1577, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.6378021576016466e-05, |
|
"loss": 2.1594, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.6117810856696702e-05, |
|
"loss": 2.194, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.181640625, |
|
"learning_rate": 2.5858435464302315e-05, |
|
"loss": 2.1481, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.559990447100195e-05, |
|
"loss": 2.1835, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.5342226919429806e-05, |
|
"loss": 2.1606, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.5085411822369244e-05, |
|
"loss": 2.1763, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 2.4829468162437554e-05, |
|
"loss": 2.1724, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.4574404891771826e-05, |
|
"loss": 2.1815, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 2.43202309317157e-05, |
|
"loss": 2.1717, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.406695517250753e-05, |
|
"loss": 2.1995, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 2.381458647296925e-05, |
|
"loss": 2.1933, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 2.3563133660196556e-05, |
|
"loss": 2.1821, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.3312605529250276e-05, |
|
"loss": 2.1763, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.3063010842848564e-05, |
|
"loss": 2.1838, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 2.2814358331060532e-05, |
|
"loss": 2.1895, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 2.2566656691000932e-05, |
|
"loss": 2.1926, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 2.2319914586525777e-05, |
|
"loss": 2.19, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 2.2074140647929503e-05, |
|
"loss": 2.1611, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 2.1829343471642994e-05, |
|
"loss": 2.1936, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.158553161993294e-05, |
|
"loss": 2.1815, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 2.134271362060238e-05, |
|
"loss": 2.1871, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.1100897966692297e-05, |
|
"loss": 2.1642, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.0860093116184797e-05, |
|
"loss": 2.1861, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.0620307491707012e-05, |
|
"loss": 2.1904, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1796875, |
|
"learning_rate": 2.0381549480236685e-05, |
|
"loss": 2.1855, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 2.0143827432808743e-05, |
|
"loss": 2.1886, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 1.990714966422321e-05, |
|
"loss": 2.1681, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 1.9671524452754393e-05, |
|
"loss": 2.1596, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 1.9436960039861324e-05, |
|
"loss": 2.1709, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 1.9203464629899502e-05, |
|
"loss": 2.1794, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.8971046389833952e-05, |
|
"loss": 2.1867, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.873971344895347e-05, |
|
"loss": 2.1681, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.850947389858643e-05, |
|
"loss": 2.1587, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.8280335791817733e-05, |
|
"loss": 2.1862, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 1.805230714320701e-05, |
|
"loss": 2.1756, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 1.7825395928508447e-05, |
|
"loss": 2.1792, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.7599610084391784e-05, |
|
"loss": 2.1885, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 1.737495750816464e-05, |
|
"loss": 2.1602, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.7151446057496406e-05, |
|
"loss": 2.1823, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.6929083550143255e-05, |
|
"loss": 2.1806, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 1.670787776367489e-05, |
|
"loss": 2.1781, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.6487836435202357e-05, |
|
"loss": 2.1545, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 1.6268967261107426e-05, |
|
"loss": 2.1559, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.6051277896773565e-05, |
|
"loss": 2.1705, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 1.583477595631794e-05, |
|
"loss": 2.1662, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 1.5619469012325255e-05, |
|
"loss": 2.171, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 1.540536459558286e-05, |
|
"loss": 2.1957, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 1.519247019481731e-05, |
|
"loss": 2.1696, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 1.4980793256432474e-05, |
|
"loss": 2.1624, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.4770341184248997e-05, |
|
"loss": 2.162, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 1.4561121339245487e-05, |
|
"loss": 2.2188, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.4353141039300921e-05, |
|
"loss": 2.1537, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.4146407558938695e-05, |
|
"loss": 2.1682, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 1.3940928129072279e-05, |
|
"loss": 2.1791, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.3736709936752196e-05, |
|
"loss": 2.2001, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.3533760124914713e-05, |
|
"loss": 2.1718, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.3332085792131966e-05, |
|
"loss": 2.1829, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.3131693992363664e-05, |
|
"loss": 2.17, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.293259173471041e-05, |
|
"loss": 2.1583, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.2734785983168485e-05, |
|
"loss": 2.1606, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.2538283656386319e-05, |
|
"loss": 2.1793, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.2343091627422487e-05, |
|
"loss": 2.1768, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 1.2149216723505246e-05, |
|
"loss": 2.1973, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 1.1956665725793831e-05, |
|
"loss": 2.1827, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.1765445369141276e-05, |
|
"loss": 2.174, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.1575562341858709e-05, |
|
"loss": 2.1979, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.18359375, |
|
"learning_rate": 1.1387023285481575e-05, |
|
"loss": 2.1689, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 1.1199834794537263e-05, |
|
"loss": 2.2038, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.1014003416314439e-05, |
|
"loss": 2.1569, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 1.0829535650634104e-05, |
|
"loss": 2.1832, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.0646437949622118e-05, |
|
"loss": 2.2037, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.0464716717483736e-05, |
|
"loss": 2.1605, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.0284378310279369e-05, |
|
"loss": 2.1733, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.0105429035702441e-05, |
|
"loss": 2.1641, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 9.927875152858729e-06, |
|
"loss": 2.1786, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 9.751722872047353e-06, |
|
"loss": 2.1697, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 9.57697835454367e-06, |
|
"loss": 2.1725, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 9.403647712383712e-06, |
|
"loss": 2.1869, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 9.231737008150415e-06, |
|
"loss": 2.1992, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 9.061252254761576e-06, |
|
"loss": 2.1953, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 8.8921994152595e-06, |
|
"loss": 2.196, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1787109375, |
|
"learning_rate": 8.724584402602521e-06, |
|
"loss": 2.1629, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 8.558413079458106e-06, |
|
"loss": 2.159, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 8.393691257997782e-06, |
|
"loss": 2.1791, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 8.230424699693923e-06, |
|
"loss": 2.1723, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 8.068619115118176e-06, |
|
"loss": 2.1735, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 7.908280163741732e-06, |
|
"loss": 2.1856, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 7.749413453737375e-06, |
|
"loss": 2.1971, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 7.592024541783343e-06, |
|
"loss": 2.1577, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 7.43611893286893e-06, |
|
"loss": 2.1727, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 7.281702080102004e-06, |
|
"loss": 2.1791, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 7.128779384518164e-06, |
|
"loss": 2.139, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.977356194891998e-06, |
|
"loss": 2.1698, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 6.827437807549814e-06, |
|
"loss": 2.1472, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 6.679029466184506e-06, |
|
"loss": 2.198, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 6.5321363616721306e-06, |
|
"loss": 2.1632, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 6.386763631890313e-06, |
|
"loss": 2.1649, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 6.242916361538559e-06, |
|
"loss": 2.1872, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 6.100599581960415e-06, |
|
"loss": 2.1755, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 5.9598182709674655e-06, |
|
"loss": 2.1657, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.820577352665252e-06, |
|
"loss": 2.1639, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 5.682881697280984e-06, |
|
"loss": 2.185, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 5.546736120993318e-06, |
|
"loss": 2.1609, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.16796875, |
|
"learning_rate": 5.412145385763728e-06, |
|
"loss": 2.1895, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 5.279114199170093e-06, |
|
"loss": 2.1446, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 5.1476472142419965e-06, |
|
"loss": 2.156, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.017749029297919e-06, |
|
"loss": 2.1997, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 4.889424187784486e-06, |
|
"loss": 2.1674, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 4.762677178117503e-06, |
|
"loss": 2.1792, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 4.637512433524987e-06, |
|
"loss": 2.1648, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1650390625, |
|
"learning_rate": 4.5139343318920945e-06, |
|
"loss": 2.1833, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 4.391947195607965e-06, |
|
"loss": 2.1809, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 4.271555291414636e-06, |
|
"loss": 2.2085, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.166015625, |
|
"learning_rate": 4.152762830257689e-06, |
|
"loss": 2.153, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 4.035573967139023e-06, |
|
"loss": 2.1964, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.919992800971517e-06, |
|
"loss": 2.1967, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 3.8060233744356633e-06, |
|
"loss": 2.1448, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.6936696738381737e-06, |
|
"loss": 2.151, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 3.5829356289725223e-06, |
|
"loss": 2.1674, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 3.473825112981527e-06, |
|
"loss": 2.1734, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.3663419422218677e-06, |
|
"loss": 2.2007, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.260489876130568e-06, |
|
"loss": 2.1475, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 3.156272617093553e-06, |
|
"loss": 2.1635, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 3.0536938103161494e-06, |
|
"loss": 2.212, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.9527570436955255e-06, |
|
"loss": 2.1406, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 2.8534658476952635e-06, |
|
"loss": 2.2019, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.7558236952218485e-06, |
|
"loss": 2.2006, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 2.659834001503186e-06, |
|
"loss": 2.1873, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 2.5655001239691835e-06, |
|
"loss": 2.1746, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.4728253621342566e-06, |
|
"loss": 2.1732, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.381812957481999e-06, |
|
"loss": 2.1399, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 2.292466093351747e-06, |
|
"loss": 2.18, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 2.2047878948272373e-06, |
|
"loss": 2.1758, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.1796875, |
|
"learning_rate": 2.1187814286273646e-06, |
|
"loss": 2.1574, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 2.0344497029988086e-06, |
|
"loss": 2.163, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.951795667610928e-06, |
|
"loss": 2.163, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.8708222134525167e-06, |
|
"loss": 2.2011, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.7915321727307088e-06, |
|
"loss": 2.182, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.7139283187719124e-06, |
|
"loss": 2.1744, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 1.6380133659248176e-06, |
|
"loss": 2.1674, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.5637899694654456e-06, |
|
"loss": 2.1665, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.4912607255042787e-06, |
|
"loss": 2.1871, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1806640625, |
|
"learning_rate": 1.4204281708954437e-06, |
|
"loss": 2.1737, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 1.3512947831480217e-06, |
|
"loss": 2.1824, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.2838629803393342e-06, |
|
"loss": 2.1608, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 1.218135121030406e-06, |
|
"loss": 2.1652, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 1.1541135041834628e-06, |
|
"loss": 2.1705, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.0918003690815138e-06, |
|
"loss": 2.1644, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1796875, |
|
"learning_rate": 1.03119789525003e-06, |
|
"loss": 2.1674, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 9.723082023807118e-07, |
|
"loss": 2.158, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 9.151333502573467e-07, |
|
"loss": 2.2049, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 8.596753386837797e-07, |
|
"loss": 2.1732, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 8.059361074139293e-07, |
|
"loss": 2.1608, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1845703125, |
|
"learning_rate": 7.539175360839812e-07, |
|
"loss": 2.1728, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 7.036214441466348e-07, |
|
"loss": 2.1575, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.550495908074328e-07, |
|
"loss": 2.179, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 6.082036749632703e-07, |
|
"loss": 2.1894, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.177734375, |
|
"learning_rate": 5.630853351429599e-07, |
|
"loss": 2.1787, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 5.196961494498997e-07, |
|
"loss": 2.1696, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 4.780376355069172e-07, |
|
"loss": 2.1568, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 4.381112504031337e-07, |
|
"loss": 2.1699, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.9991839064305035e-07, |
|
"loss": 2.1795, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 3.634603920976809e-07, |
|
"loss": 2.1702, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.1787109375, |
|
"learning_rate": 3.2873852995781716e-07, |
|
"loss": 2.1864, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.957540186894481e-07, |
|
"loss": 2.1621, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 2.6450801199126573e-07, |
|
"loss": 2.2217, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 2.3500160275430893e-07, |
|
"loss": 2.1848, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 2.0723582302376588e-07, |
|
"loss": 2.187, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1689453125, |
|
"learning_rate": 1.812116439628364e-07, |
|
"loss": 2.1751, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.569299758187981e-07, |
|
"loss": 2.1874, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1748046875, |
|
"learning_rate": 1.3439166789113722e-07, |
|
"loss": 2.158, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1669921875, |
|
"learning_rate": 1.1359750850187256e-07, |
|
"loss": 2.1644, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.17578125, |
|
"learning_rate": 9.454822496796634e-08, |
|
"loss": 2.1859, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 7.724448357588898e-08, |
|
"loss": 2.1779, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 6.168688955830448e-08, |
|
"loss": 2.1375, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 4.7875987072915075e-08, |
|
"loss": 2.1857, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 3.581225918342646e-08, |
|
"loss": 2.1608, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.169921875, |
|
"learning_rate": 2.5496127842644658e-08, |
|
"loss": 2.1335, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 1.692795387772117e-08, |
|
"loss": 2.1946, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.0108036977535262e-08, |
|
"loss": 2.1851, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.171875, |
|
"learning_rate": 5.03661568219127e-09, |
|
"loss": 2.1765, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.173828125, |
|
"learning_rate": 1.7138673747196799e-09, |
|
"loss": 2.2174, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.1728515625, |
|
"learning_rate": 1.3990827483212344e-10, |
|
"loss": 2.1651, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.1763968467712402, |
|
"eval_runtime": 177.8602, |
|
"eval_samples_per_second": 14.939, |
|
"eval_steps_per_second": 1.872, |
|
"step": 2952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2952, |
|
"total_flos": 3.038696615046021e+17, |
|
"train_loss": 2.200481324176478, |
|
"train_runtime": 12060.097, |
|
"train_samples_per_second": 3.917, |
|
"train_steps_per_second": 0.245 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2952, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 3.038696615046021e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
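The state above ends with the aggregate eval/train summary and the run-level settings (logging_steps, max_steps, save_steps, train_batch_size). As a minimal sketch of how such a log can be consumed, the snippet below loads the file with Python's standard json module and summarises the loss curve; the file name "trainer_state.json" is the usual Hugging Face Trainer output name and is assumed here, and the snippet is illustrative rather than part of the trainer state itself.

# Illustrative sketch (assumption: the log above is saved as trainer_state.json).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Training-loss records carry both "loss" and "learning_rate";
# the eval record carries "eval_loss" and the final record carries "train_loss".
train_logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]

print(f"{len(train_logs)} logged training points "
      f"(one every {state['logging_steps']} steps, {state['max_steps']} steps total)")
print(f"first loss {losses[0]:.4f} at step {steps[0]}, "
      f"last loss {losses[-1]:.4f} at step {steps[-1]}")

for e in state["log_history"]:
    if "eval_loss" in e:
        print(f"eval_loss {e['eval_loss']:.4f} at step {e['step']}")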
|