{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1731,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 35.0,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.2265,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 15.8125,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.2022,
      "step": 10
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.59375,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.0622,
      "step": 20
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.859375,
      "learning_rate": 6e-06,
      "loss": 0.9902,
      "step": 30
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.9296875,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.9469,
      "step": 40
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.53125,
      "learning_rate": 1e-05,
      "loss": 0.9267,
      "step": 50
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.2890625,
      "learning_rate": 9.999126844313853e-06,
      "loss": 0.9064,
      "step": 60
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.2421875,
      "learning_rate": 9.996507682215754e-06,
      "loss": 0.8855,
      "step": 70
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.1796875,
      "learning_rate": 9.992143428480215e-06,
      "loss": 0.8732,
      "step": 80
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.1328125,
      "learning_rate": 9.986035607376421e-06,
      "loss": 0.8754,
      "step": 90
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.234375,
      "learning_rate": 9.97818635213586e-06,
      "loss": 0.8692,
      "step": 100
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2421875,
      "learning_rate": 9.968598404207276e-06,
      "loss": 0.8661,
      "step": 110
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.3203125,
      "learning_rate": 9.957275112299164e-06,
      "loss": 0.8518,
      "step": 120
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2734375,
      "learning_rate": 9.944220431210216e-06,
      "loss": 0.8465,
      "step": 130
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.296875,
      "learning_rate": 9.929438920448038e-06,
      "loss": 0.8465,
      "step": 140
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.15625,
      "learning_rate": 9.912935742636698e-06,
      "loss": 0.8386,
      "step": 150
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.15625,
      "learning_rate": 9.894716661713616e-06,
      "loss": 0.8346,
      "step": 160
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1015625,
      "learning_rate": 9.874788040916433e-06,
      "loss": 0.8376,
      "step": 170
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.1484375,
      "learning_rate": 9.853156840560576e-06,
      "loss": 0.8245,
      "step": 180
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.234375,
      "learning_rate": 9.82983061560828e-06,
      "loss": 0.8201,
      "step": 190
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.328125,
      "learning_rate": 9.804817513029927e-06,
      "loss": 0.8287,
      "step": 200
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.4921875,
      "learning_rate": 9.778126268958614e-06,
      "loss": 0.8136,
      "step": 210
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1875,
      "learning_rate": 9.749766205638954e-06,
      "loss": 0.8097,
      "step": 220
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.3203125,
      "learning_rate": 9.719747228171163e-06,
      "loss": 0.8038,
      "step": 230
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.2265625,
      "learning_rate": 9.688079821051594e-06,
      "loss": 0.8007,
      "step": 240
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.1015625,
      "learning_rate": 9.65477504451088e-06,
      "loss": 0.791,
      "step": 250
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.15625,
      "learning_rate": 9.619844530651026e-06,
      "loss": 0.7975,
      "step": 260
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.046875,
      "learning_rate": 9.583300479382754e-06,
      "loss": 0.7916,
      "step": 270
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.2421875,
      "learning_rate": 9.545155654164523e-06,
      "loss": 0.7919,
      "step": 280
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.1875,
      "learning_rate": 9.50542337754475e-06,
      "loss": 0.7991,
      "step": 290
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.1328125,
      "learning_rate": 9.464117526508736e-06,
      "loss": 0.7858,
      "step": 300
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.265625,
      "learning_rate": 9.421252527631964e-06,
      "loss": 0.7785,
      "step": 310
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.1171875,
      "learning_rate": 9.376843352041434e-06,
      "loss": 0.7859,
      "step": 320
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.0703125,
      "learning_rate": 9.33090551018682e-06,
      "loss": 0.778,
      "step": 330
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.140625,
      "learning_rate": 9.283455046423254e-06,
      "loss": 0.778,
      "step": 340
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.1640625,
      "learning_rate": 9.234508533407633e-06,
      "loss": 0.7788,
      "step": 350
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.140625,
      "learning_rate": 9.184083066310419e-06,
      "loss": 0.7765,
      "step": 360
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.0625,
      "learning_rate": 9.132196256844941e-06,
      "loss": 0.7643,
      "step": 370
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.1171875,
      "learning_rate": 9.078866227116288e-06,
      "loss": 0.7642,
      "step": 380
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.1015625,
      "learning_rate": 9.02411160329194e-06,
      "loss": 0.7666,
      "step": 390
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.1484375,
      "learning_rate": 8.96795150909635e-06,
      "loss": 0.7585,
      "step": 400
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.203125,
      "learning_rate": 8.910405559131752e-06,
      "loss": 0.7603,
      "step": 410
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.0625,
      "learning_rate": 8.851493852027515e-06,
      "loss": 0.7602,
      "step": 420
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.2578125,
      "learning_rate": 8.791236963420458e-06,
      "loss": 0.7561,
      "step": 430
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.109375,
      "learning_rate": 8.729655938768542e-06,
      "loss": 0.7575,
      "step": 440
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.109375,
      "learning_rate": 8.666772286000505e-06,
      "loss": 0.7622,
      "step": 450
    },
    {
      "epoch": 0.8,
      "grad_norm": 31.125,
      "learning_rate": 8.602607968003935e-06,
      "loss": 0.7554,
      "step": 460
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.265625,
      "learning_rate": 8.53718539495448e-06,
      "loss": 0.7429,
      "step": 470
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.3359375,
      "learning_rate": 8.470527416488799e-06,
      "loss": 0.7561,
      "step": 480
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.203125,
      "learning_rate": 8.402657313724065e-06,
      "loss": 0.7591,
      "step": 490
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.4296875,
      "learning_rate": 8.333598791126737e-06,
      "loss": 0.7455,
      "step": 500
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.15625,
      "learning_rate": 8.263375968233487e-06,
      "loss": 0.7397,
      "step": 510
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.1484375,
      "learning_rate": 8.192013371227157e-06,
      "loss": 0.7506,
      "step": 520
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.15625,
      "learning_rate": 8.119535924370692e-06,
      "loss": 0.7478,
      "step": 530
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.140625,
      "learning_rate": 8.045968941302024e-06,
      "loss": 0.7414,
      "step": 540
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.328125,
      "learning_rate": 7.971338116192986e-06,
      "loss": 0.7436,
      "step": 550
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.15625,
      "learning_rate": 7.895669514775302e-06,
      "loss": 0.744,
      "step": 560
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.109375,
      "learning_rate": 7.818989565236806e-06,
      "loss": 0.7347,
      "step": 570
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.609375,
      "learning_rate": 7.74132504899108e-06,
      "loss": 0.7188,
      "step": 580
    },
    {
      "epoch": 1.02,
      "grad_norm": 1.296875,
      "learning_rate": 7.662703091323713e-06,
      "loss": 0.6638,
      "step": 590
    },
    {
      "epoch": 1.04,
      "grad_norm": 1.2421875,
      "learning_rate": 7.583151151918464e-06,
      "loss": 0.6657,
      "step": 600
    },
    {
      "epoch": 1.06,
      "grad_norm": 1.1875,
      "learning_rate": 7.502697015266623e-06,
      "loss": 0.6547,
      "step": 610
    },
    {
      "epoch": 1.07,
      "grad_norm": 1.15625,
      "learning_rate": 7.421368780962951e-06,
      "loss": 0.6598,
      "step": 620
    },
    {
      "epoch": 1.09,
      "grad_norm": 1.1875,
      "learning_rate": 7.339194853891536e-06,
      "loss": 0.6528,
      "step": 630
    },
    {
      "epoch": 1.11,
      "grad_norm": 1.375,
      "learning_rate": 7.256203934305047e-06,
      "loss": 0.6538,
      "step": 640
    },
    {
      "epoch": 1.13,
      "grad_norm": 1.3515625,
      "learning_rate": 7.172425007800822e-06,
      "loss": 0.6573,
      "step": 650
    },
    {
      "epoch": 1.14,
      "grad_norm": 1.3046875,
      "learning_rate": 7.087887335197281e-06,
      "loss": 0.6538,
      "step": 660
    },
    {
      "epoch": 1.16,
      "grad_norm": 1.1953125,
      "learning_rate": 7.0026204423142364e-06,
      "loss": 0.6562,
      "step": 670
    },
    {
      "epoch": 1.18,
      "grad_norm": 1.1171875,
      "learning_rate": 6.91665410966063e-06,
      "loss": 0.6516,
      "step": 680
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.140625,
      "learning_rate": 6.830018362033335e-06,
      "loss": 0.6492,
      "step": 690
    },
    {
      "epoch": 1.21,
      "grad_norm": 1.3046875,
      "learning_rate": 6.742743458030614e-06,
      "loss": 0.6508,
      "step": 700
    },
    {
      "epoch": 1.23,
      "grad_norm": 1.15625,
      "learning_rate": 6.654859879483942e-06,
      "loss": 0.6492,
      "step": 710
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.125,
      "learning_rate": 6.566398320811852e-06,
      "loss": 0.6541,
      "step": 720
    },
    {
      "epoch": 1.27,
      "grad_norm": 1.0859375,
      "learning_rate": 6.477389678299528e-06,
      "loss": 0.6481,
      "step": 730
    },
    {
      "epoch": 1.28,
      "grad_norm": 1.171875,
      "learning_rate": 6.387865039307899e-06,
      "loss": 0.6511,
      "step": 740
    },
    {
      "epoch": 1.3,
      "grad_norm": 1.15625,
      "learning_rate": 6.297855671415999e-06,
      "loss": 0.6546,
      "step": 750
    },
    {
      "epoch": 1.32,
      "grad_norm": 1.1484375,
      "learning_rate": 6.207393011500382e-06,
      "loss": 0.6491,
      "step": 760
    },
    {
      "epoch": 1.33,
      "grad_norm": 1.1015625,
      "learning_rate": 6.116508654755402e-06,
      "loss": 0.6482,
      "step": 770
    },
    {
      "epoch": 1.35,
      "grad_norm": 1.1328125,
      "learning_rate": 6.025234343658209e-06,
      "loss": 0.6464,
      "step": 780
    },
    {
      "epoch": 1.37,
      "grad_norm": 1.40625,
      "learning_rate": 5.933601956882299e-06,
      "loss": 0.6466,
      "step": 790
    },
    {
      "epoch": 1.39,
      "grad_norm": 1.171875,
      "learning_rate": 5.841643498163488e-06,
      "loss": 0.6549,
      "step": 800
    },
    {
      "epoch": 1.4,
      "grad_norm": 1.1796875,
      "learning_rate": 5.7493910851222244e-06,
      "loss": 0.6445,
      "step": 810
    },
    {
      "epoch": 1.42,
      "grad_norm": 1.1328125,
      "learning_rate": 5.656876938046112e-06,
      "loss": 0.6432,
      "step": 820
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.09375,
      "learning_rate": 5.56413336863658e-06,
      "loss": 0.6485,
      "step": 830
    },
    {
      "epoch": 1.46,
      "grad_norm": 1.1875,
      "learning_rate": 5.471192768723618e-06,
      "loss": 0.6445,
      "step": 840
    },
    {
      "epoch": 1.47,
      "grad_norm": 1.15625,
      "learning_rate": 5.378087598952543e-06,
      "loss": 0.6493,
      "step": 850
    },
    {
      "epoch": 1.49,
      "grad_norm": 1.1015625,
      "learning_rate": 5.284850377446715e-06,
      "loss": 0.6493,
      "step": 860
    },
    {
      "epoch": 1.51,
      "grad_norm": 1.109375,
      "learning_rate": 5.191513668450178e-06,
      "loss": 0.6481,
      "step": 870
    },
    {
      "epoch": 1.53,
      "grad_norm": 1.125,
      "learning_rate": 5.098110070954209e-06,
      "loss": 0.6487,
      "step": 880
    },
    {
      "epoch": 1.54,
      "grad_norm": 1.1328125,
      "learning_rate": 5.004672207311709e-06,
      "loss": 0.6466,
      "step": 890
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.1328125,
      "learning_rate": 4.911232711843459e-06,
      "loss": 0.6407,
      "step": 900
    },
    {
      "epoch": 1.58,
      "grad_norm": 1.1015625,
      "learning_rate": 4.817824219440167e-06,
      "loss": 0.6417,
      "step": 910
    },
    {
      "epoch": 1.59,
      "grad_norm": 1.1015625,
      "learning_rate": 4.724479354164347e-06,
      "loss": 0.6385,
      "step": 920
    },
    {
      "epoch": 1.61,
      "grad_norm": 1.109375,
      "learning_rate": 4.631230717855949e-06,
      "loss": 0.6441,
      "step": 930
    },
    {
      "epoch": 1.63,
      "grad_norm": 1.140625,
      "learning_rate": 4.538110878745784e-06,
      "loss": 0.6445,
      "step": 940
    },
    {
      "epoch": 1.65,
      "grad_norm": 1.203125,
      "learning_rate": 4.4451523600806555e-06,
      "loss": 0.6397,
      "step": 950
    },
    {
      "epoch": 1.66,
      "grad_norm": 1.1328125,
      "learning_rate": 4.352387628764223e-06,
      "loss": 0.6387,
      "step": 960
    },
    {
      "epoch": 1.68,
      "grad_norm": 1.125,
      "learning_rate": 4.259849084017537e-06,
      "loss": 0.6374,
      "step": 970
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.09375,
      "learning_rate": 4.16756904606321e-06,
      "loss": 0.6268,
      "step": 980
    },
    {
      "epoch": 1.72,
      "grad_norm": 1.140625,
      "learning_rate": 4.075579744837185e-06,
      "loss": 0.6394,
      "step": 990
    },
    {
      "epoch": 1.73,
      "grad_norm": 1.171875,
      "learning_rate": 3.983913308732033e-06,
      "loss": 0.635,
      "step": 1000
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.09375,
      "learning_rate": 3.89260175337572e-06,
      "loss": 0.644,
      "step": 1010
    },
    {
      "epoch": 1.77,
      "grad_norm": 1.109375,
      "learning_rate": 3.801676970449755e-06,
      "loss": 0.6357,
      "step": 1020
    },
    {
      "epoch": 1.79,
      "grad_norm": 1.171875,
      "learning_rate": 3.711170716550627e-06,
      "loss": 0.6314,
      "step": 1030
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.125,
      "learning_rate": 3.6211146020984246e-06,
      "loss": 0.6364,
      "step": 1040
    },
    {
      "epoch": 1.82,
      "grad_norm": 1.1171875,
      "learning_rate": 3.531540080296513e-06,
      "loss": 0.6356,
      "step": 1050
    },
    {
      "epoch": 1.84,
      "grad_norm": 1.125,
      "learning_rate": 3.442478436146108e-06,
      "loss": 0.6373,
      "step": 1060
    },
    {
      "epoch": 1.85,
      "grad_norm": 1.109375,
      "learning_rate": 3.353960775519612e-06,
      "loss": 0.6394,
      "step": 1070
    },
    {
      "epoch": 1.87,
      "grad_norm": 1.109375,
      "learning_rate": 3.266018014296506e-06,
      "loss": 0.6377,
      "step": 1080
    },
    {
      "epoch": 1.89,
      "grad_norm": 1.109375,
      "learning_rate": 3.1786808675655973e-06,
      "loss": 0.6313,
      "step": 1090
    },
    {
      "epoch": 1.91,
      "grad_norm": 1.1015625,
      "learning_rate": 3.0919798388973977e-06,
      "loss": 0.6403,
      "step": 1100
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.078125,
      "learning_rate": 3.005945209690378e-06,
      "loss": 0.6325,
      "step": 1110
    },
    {
      "epoch": 1.94,
      "grad_norm": 1.09375,
      "learning_rate": 2.920607028594819e-06,
      "loss": 0.6285,
      "step": 1120
    },
    {
      "epoch": 1.96,
      "grad_norm": 1.09375,
      "learning_rate": 2.835995101017943e-06,
      "loss": 0.6372,
      "step": 1130
    },
    {
      "epoch": 1.98,
      "grad_norm": 1.1015625,
      "learning_rate": 2.7521389787140264e-06,
      "loss": 0.6336,
      "step": 1140
    },
    {
      "epoch": 1.99,
      "grad_norm": 1.109375,
      "learning_rate": 2.6690679494630734e-06,
      "loss": 0.6325,
      "step": 1150
    },
    {
      "epoch": 2.01,
      "grad_norm": 1.765625,
      "learning_rate": 2.5868110268416974e-06,
      "loss": 0.6033,
      "step": 1160
    },
    {
      "epoch": 2.03,
      "grad_norm": 3.0,
      "learning_rate": 2.5053969400897847e-06,
      "loss": 0.5946,
      "step": 1170
    },
    {
      "epoch": 2.05,
      "grad_norm": 1.125,
      "learning_rate": 2.4248541240764484e-06,
      "loss": 0.5962,
      "step": 1180
    },
    {
      "epoch": 2.06,
      "grad_norm": 1.140625,
      "learning_rate": 2.345210709368802e-06,
      "loss": 0.6014,
      "step": 1190
    },
    {
      "epoch": 2.08,
      "grad_norm": 1.09375,
      "learning_rate": 2.2664945124070076e-06,
      "loss": 0.5929,
      "step": 1200
    },
    {
      "epoch": 2.1,
      "grad_norm": 1.1015625,
      "learning_rate": 2.1887330257890554e-06,
      "loss": 0.5869,
      "step": 1210
    },
    {
      "epoch": 2.11,
      "grad_norm": 1.09375,
      "learning_rate": 2.1119534086686265e-06,
      "loss": 0.5909,
      "step": 1220
    },
    {
      "epoch": 2.13,
      "grad_norm": 1.109375,
      "learning_rate": 2.036182477269429e-06,
      "loss": 0.5916,
      "step": 1230
    },
    {
      "epoch": 2.15,
      "grad_norm": 1.078125,
      "learning_rate": 1.9614466955193007e-06,
      "loss": 0.5981,
      "step": 1240
    },
    {
      "epoch": 2.17,
      "grad_norm": 1.1015625,
      "learning_rate": 1.8877721658073589e-06,
      "loss": 0.5903,
      "step": 1250
    },
    {
      "epoch": 2.18,
      "grad_norm": 1.1015625,
      "learning_rate": 1.8151846198674227e-06,
      "loss": 0.5964,
      "step": 1260
    },
    {
      "epoch": 2.2,
      "grad_norm": 1.09375,
      "learning_rate": 1.7437094097908813e-06,
      "loss": 0.5952,
      "step": 1270
    },
    {
      "epoch": 2.22,
      "grad_norm": 1.109375,
      "learning_rate": 1.6733714991721738e-06,
      "loss": 0.5901,
      "step": 1280
    },
    {
      "epoch": 2.24,
      "grad_norm": 1.0859375,
      "learning_rate": 1.6041954543899452e-06,
      "loss": 0.5891,
      "step": 1290
    },
    {
      "epoch": 2.25,
      "grad_norm": 1.125,
      "learning_rate": 1.5362054360269336e-06,
      "loss": 0.5912,
      "step": 1300
    },
    {
      "epoch": 2.27,
      "grad_norm": 1.1015625,
      "learning_rate": 1.469425190431592e-06,
      "loss": 0.5946,
      "step": 1310
    },
    {
      "epoch": 2.29,
      "grad_norm": 1.109375,
      "learning_rate": 1.4038780414243875e-06,
      "loss": 0.5979,
      "step": 1320
    },
    {
      "epoch": 2.31,
      "grad_norm": 1.109375,
      "learning_rate": 1.3395868821516667e-06,
      "loss": 0.587,
      "step": 1330
    },
    {
      "epoch": 2.32,
      "grad_norm": 1.1015625,
      "learning_rate": 1.2765741670899428e-06,
      "loss": 0.5933,
      "step": 1340
    },
    {
      "epoch": 2.34,
      "grad_norm": 1.0859375,
      "learning_rate": 1.2148619042033994e-06,
      "loss": 0.5929,
      "step": 1350
    },
    {
      "epoch": 2.36,
      "grad_norm": 1.1015625,
      "learning_rate": 1.1544716472573336e-06,
      "loss": 0.5858,
      "step": 1360
    },
    {
      "epoch": 2.37,
      "grad_norm": 1.109375,
      "learning_rate": 1.0954244882902426e-06,
      "loss": 0.5905,
      "step": 1370
    },
    {
      "epoch": 2.39,
      "grad_norm": 1.140625,
      "learning_rate": 1.0377410502471668e-06,
      "loss": 0.5858,
      "step": 1380
    },
    {
      "epoch": 2.41,
      "grad_norm": 1.0703125,
      "learning_rate": 9.814414797768756e-07,
      "loss": 0.5957,
      "step": 1390
    },
    {
      "epoch": 2.43,
      "grad_norm": 1.1015625,
      "learning_rate": 9.265454401954044e-07,
      "loss": 0.5915,
      "step": 1400
    },
    {
      "epoch": 2.44,
      "grad_norm": 1.078125,
      "learning_rate": 8.730721046183932e-07,
      "loss": 0.5869,
      "step": 1410
    },
    {
      "epoch": 2.46,
      "grad_norm": 1.1015625,
      "learning_rate": 8.210401492646497e-07,
      "loss": 0.5947,
      "step": 1420
    },
    {
      "epoch": 2.48,
      "grad_norm": 1.1015625,
      "learning_rate": 7.704677469332455e-07,
      "loss": 0.5925,
      "step": 1430
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.125,
      "learning_rate": 7.213725606564436e-07,
      "loss": 0.5945,
      "step": 1440
    },
    {
      "epoch": 2.51,
      "grad_norm": 1.109375,
      "learning_rate": 6.737717375306684e-07,
      "loss": 0.5921,
      "step": 1450
    },
    {
      "epoch": 2.53,
      "grad_norm": 1.1328125,
      "learning_rate": 6.276819027276709e-07,
      "loss": 0.5861,
      "step": 1460
    },
    {
      "epoch": 2.55,
      "grad_norm": 1.078125,
      "learning_rate": 5.831191536879843e-07,
      "loss": 0.5955,
      "step": 1470
    },
    {
      "epoch": 2.56,
      "grad_norm": 1.1015625,
      "learning_rate": 5.400990544986922e-07,
      "loss": 0.5896,
      "step": 1480
    },
    {
      "epoch": 2.58,
      "grad_norm": 1.28125,
      "learning_rate": 4.986366304574869e-07,
      "loss": 0.595,
      "step": 1490
    },
    {
      "epoch": 2.6,
      "grad_norm": 1.1015625,
      "learning_rate": 4.587463628248923e-07,
      "loss": 0.5887,
      "step": 1500
    },
    {
      "epoch": 2.62,
      "grad_norm": 1.1015625,
      "learning_rate": 4.2044218376651124e-07,
      "loss": 0.5929,
      "step": 1510
    },
    {
      "epoch": 2.63,
      "grad_norm": 1.09375,
      "learning_rate": 3.837374714870434e-07,
      "loss": 0.6026,
      "step": 1520
    },
    {
      "epoch": 2.65,
      "grad_norm": 1.1171875,
      "learning_rate": 3.486450455577828e-07,
      "loss": 0.5933,
      "step": 1530
    },
    {
      "epoch": 2.67,
      "grad_norm": 1.1328125,
      "learning_rate": 3.1517716243922414e-07,
      "loss": 0.5923,
      "step": 1540
    },
    {
      "epoch": 2.69,
      "grad_norm": 1.0859375,
      "learning_rate": 2.8334551120034857e-07,
      "loss": 0.5962,
      "step": 1550
    },
    {
      "epoch": 2.7,
      "grad_norm": 1.125,
      "learning_rate": 2.5316120943606634e-07,
      "loss": 0.5922,
      "step": 1560
    },
    {
      "epoch": 2.72,
      "grad_norm": 1.0625,
      "learning_rate": 2.246347993842657e-07,
      "loss": 0.5872,
      "step": 1570
    },
    {
      "epoch": 2.74,
      "grad_norm": 1.0859375,
      "learning_rate": 1.977762442438025e-07,
      "loss": 0.5943,
      "step": 1580
    },
    {
      "epoch": 2.76,
      "grad_norm": 1.078125,
      "learning_rate": 1.725949246947345e-07,
      "loss": 0.5888,
      "step": 1590
    },
    {
      "epoch": 2.77,
      "grad_norm": 1.0859375,
      "learning_rate": 1.4909963562200036e-07,
      "loss": 0.5929,
      "step": 1600
    },
    {
      "epoch": 2.79,
      "grad_norm": 1.09375,
      "learning_rate": 1.272985830437018e-07,
      "loss": 0.5914,
      "step": 1610
    },
    {
      "epoch": 2.81,
      "grad_norm": 1.0546875,
      "learning_rate": 1.0719938124504725e-07,
      "loss": 0.5984,
      "step": 1620
    },
    {
      "epoch": 2.82,
      "grad_norm": 1.1015625,
      "learning_rate": 8.880905011897145e-08,
      "loss": 0.5939,
      "step": 1630
    },
    {
      "epoch": 2.84,
      "grad_norm": 1.0625,
      "learning_rate": 7.213401271435216e-08,
      "loss": 0.5913,
      "step": 1640
    },
    {
      "epoch": 2.86,
      "grad_norm": 1.0859375,
      "learning_rate": 5.718009299267968e-08,
      "loss": 0.5824,
      "step": 1650
    },
    {
      "epoch": 2.88,
      "grad_norm": 1.09375,
      "learning_rate": 4.395251379396781e-08,
      "loss": 0.5968,
      "step": 1660
    },
    {
      "epoch": 2.89,
      "grad_norm": 1.078125,
      "learning_rate": 3.2455895012614146e-08,
      "loss": 0.5877,
      "step": 1670
    },
    {
      "epoch": 2.91,
      "grad_norm": 1.0703125,
      "learning_rate": 2.2694251983841896e-08,
      "loss": 0.5875,
      "step": 1680
    },
    {
      "epoch": 2.93,
      "grad_norm": 1.859375,
      "learning_rate": 1.4670994081297796e-08,
      "loss": 0.5915,
      "step": 1690
    },
    {
      "epoch": 2.95,
      "grad_norm": 1.0703125,
      "learning_rate": 8.38892352628573e-09,
      "loss": 0.592,
      "step": 1700
    },
    {
      "epoch": 2.96,
      "grad_norm": 1.109375,
      "learning_rate": 3.850234409055742e-09,
      "loss": 0.5934,
      "step": 1710
    },
    {
      "epoch": 2.98,
      "grad_norm": 1.078125,
      "learning_rate": 1.0565119224925646e-09,
      "loss": 0.5903,
      "step": 1720
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.125,
      "learning_rate": 8.731808465167746e-12,
      "loss": 0.5943,
      "step": 1730
    },
    {
      "epoch": 3.0,
      "step": 1731,
      "total_flos": 2.4199600073374433e+18,
      "train_loss": 0.6834336294733948,
      "train_runtime": 21601.4132,
      "train_samples_per_second": 10.256,
      "train_steps_per_second": 0.08
    }
  ],
  "logging_steps": 10,
  "max_steps": 1731,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.4199600073374433e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}