{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.808988764044944, |
|
"eval_steps": 500, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.988295880149813e-05, |
|
"loss": 1.7828, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.976591760299626e-05, |
|
"loss": 1.7581, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.964887640449438e-05, |
|
"loss": 1.7499, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.953183520599251e-05, |
|
"loss": 1.7351, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.941479400749064e-05, |
|
"loss": 1.6748, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.929775280898877e-05, |
|
"loss": 1.6994, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.918071161048689e-05, |
|
"loss": 1.6761, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.906367041198502e-05, |
|
"loss": 1.6459, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.8946629213483144e-05, |
|
"loss": 1.6372, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.882958801498127e-05, |
|
"loss": 1.6715, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.87125468164794e-05, |
|
"loss": 1.6141, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.859550561797753e-05, |
|
"loss": 1.6354, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8478464419475654e-05, |
|
"loss": 1.6343, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.836142322097378e-05, |
|
"loss": 1.6072, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.824438202247191e-05, |
|
"loss": 1.6131, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.812734082397004e-05, |
|
"loss": 1.6301, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.801029962546817e-05, |
|
"loss": 1.597, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.789325842696629e-05, |
|
"loss": 1.5489, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.777621722846442e-05, |
|
"loss": 1.5998, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.765917602996255e-05, |
|
"loss": 1.6023, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.754213483146068e-05, |
|
"loss": 1.6847, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.742509363295881e-05, |
|
"loss": 1.5816, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.730805243445693e-05, |
|
"loss": 1.673, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.719101123595506e-05, |
|
"loss": 1.6192, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.707397003745318e-05, |
|
"loss": 1.6183, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.695692883895131e-05, |
|
"loss": 1.6262, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.683988764044944e-05, |
|
"loss": 1.5945, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6722846441947563e-05, |
|
"loss": 1.659, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.660580524344569e-05, |
|
"loss": 1.6135, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.648876404494382e-05, |
|
"loss": 1.6102, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.637172284644195e-05, |
|
"loss": 1.6202, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.625468164794008e-05, |
|
"loss": 1.6499, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.61376404494382e-05, |
|
"loss": 1.656, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.602059925093633e-05, |
|
"loss": 1.629, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.590355805243446e-05, |
|
"loss": 1.6194, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.578651685393259e-05, |
|
"loss": 1.6308, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.566947565543072e-05, |
|
"loss": 1.6013, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.555243445692884e-05, |
|
"loss": 1.6103, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.543539325842697e-05, |
|
"loss": 1.6281, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.531835205992509e-05, |
|
"loss": 1.5684, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.520131086142322e-05, |
|
"loss": 1.6165, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.508426966292135e-05, |
|
"loss": 1.6187, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.496722846441947e-05, |
|
"loss": 1.6211, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.48501872659176e-05, |
|
"loss": 1.6104, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.473314606741573e-05, |
|
"loss": 1.6058, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.461610486891386e-05, |
|
"loss": 1.5639, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.449906367041199e-05, |
|
"loss": 1.5879, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.438202247191011e-05, |
|
"loss": 1.5911, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.426498127340824e-05, |
|
"loss": 1.6679, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.414794007490637e-05, |
|
"loss": 1.578, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.40308988764045e-05, |
|
"loss": 1.6275, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.391385767790263e-05, |
|
"loss": 1.5811, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.379681647940075e-05, |
|
"loss": 1.5688, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.367977528089888e-05, |
|
"loss": 1.6364, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3562734082397e-05, |
|
"loss": 1.5866, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.344569288389513e-05, |
|
"loss": 1.6161, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.332865168539326e-05, |
|
"loss": 1.6424, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.321161048689138e-05, |
|
"loss": 1.623, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.309456928838951e-05, |
|
"loss": 1.5842, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.297752808988764e-05, |
|
"loss": 1.6077, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.286048689138577e-05, |
|
"loss": 1.6583, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.27434456928839e-05, |
|
"loss": 1.6026, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.262640449438202e-05, |
|
"loss": 1.6292, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.250936329588015e-05, |
|
"loss": 1.6292, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.239232209737828e-05, |
|
"loss": 1.5891, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.227528089887641e-05, |
|
"loss": 1.6386, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.215823970037454e-05, |
|
"loss": 1.6162, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.204119850187267e-05, |
|
"loss": 1.5891, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.192415730337079e-05, |
|
"loss": 1.614, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.180711610486892e-05, |
|
"loss": 1.6487, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.169007490636704e-05, |
|
"loss": 1.6591, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.157303370786517e-05, |
|
"loss": 1.5986, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.14559925093633e-05, |
|
"loss": 1.6107, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.133895131086142e-05, |
|
"loss": 1.6195, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.122191011235955e-05, |
|
"loss": 1.6278, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.110486891385768e-05, |
|
"loss": 1.5987, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.098782771535581e-05, |
|
"loss": 1.5816, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.087078651685394e-05, |
|
"loss": 1.6183, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.075374531835206e-05, |
|
"loss": 1.5796, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.063670411985019e-05, |
|
"loss": 1.6078, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.051966292134832e-05, |
|
"loss": 1.5659, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.040262172284645e-05, |
|
"loss": 1.6024, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.028558052434458e-05, |
|
"loss": 1.6321, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.01685393258427e-05, |
|
"loss": 1.6139, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.005149812734083e-05, |
|
"loss": 1.5753, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.993445692883895e-05, |
|
"loss": 1.6109, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.981741573033708e-05, |
|
"loss": 1.6098, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.970037453183521e-05, |
|
"loss": 1.6277, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.958333333333333e-05, |
|
"loss": 1.661, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.946629213483146e-05, |
|
"loss": 1.6179, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.934925093632959e-05, |
|
"loss": 1.6417, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.923220973782772e-05, |
|
"loss": 1.622, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.911516853932585e-05, |
|
"loss": 1.6275, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.899812734082397e-05, |
|
"loss": 1.6297, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.88810861423221e-05, |
|
"loss": 1.6049, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.876404494382023e-05, |
|
"loss": 1.6347, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.864700374531836e-05, |
|
"loss": 1.5998, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8529962546816486e-05, |
|
"loss": 1.6305, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.841292134831461e-05, |
|
"loss": 1.6281, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.829588014981274e-05, |
|
"loss": 1.6223, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.817883895131087e-05, |
|
"loss": 1.591, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.806179775280899e-05, |
|
"loss": 1.6133, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.794475655430712e-05, |
|
"loss": 1.6158, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.782771535580524e-05, |
|
"loss": 1.622, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.771067415730337e-05, |
|
"loss": 1.6461, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.75936329588015e-05, |
|
"loss": 1.6638, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.747659176029963e-05, |
|
"loss": 1.6071, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.735955056179776e-05, |
|
"loss": 1.6439, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.724250936329588e-05, |
|
"loss": 1.6111, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.712546816479401e-05, |
|
"loss": 1.5904, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.700842696629214e-05, |
|
"loss": 1.6312, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.689138576779027e-05, |
|
"loss": 1.6094, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6774344569288396e-05, |
|
"loss": 1.5809, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.665730337078652e-05, |
|
"loss": 1.6235, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.654026217228465e-05, |
|
"loss": 1.6241, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6423220973782776e-05, |
|
"loss": 1.594, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.63061797752809e-05, |
|
"loss": 1.6158, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.618913857677903e-05, |
|
"loss": 1.6182, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.607209737827715e-05, |
|
"loss": 1.6509, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.595505617977528e-05, |
|
"loss": 1.5991, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.583801498127341e-05, |
|
"loss": 1.6338, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.572097378277154e-05, |
|
"loss": 1.6244, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.560393258426967e-05, |
|
"loss": 1.5973, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.548689138576779e-05, |
|
"loss": 1.626, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.536985018726592e-05, |
|
"loss": 1.6008, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.525280898876405e-05, |
|
"loss": 1.6197, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5135767790262176e-05, |
|
"loss": 1.5381, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.5018726591760305e-05, |
|
"loss": 1.6108, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.490168539325843e-05, |
|
"loss": 1.6042, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.478464419475656e-05, |
|
"loss": 1.5814, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4667602996254686e-05, |
|
"loss": 1.614, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.455056179775281e-05, |
|
"loss": 1.5813, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.443352059925094e-05, |
|
"loss": 1.6115, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.431647940074906e-05, |
|
"loss": 1.5899, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.419943820224719e-05, |
|
"loss": 1.631, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.408239700374532e-05, |
|
"loss": 1.6057, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.396535580524345e-05, |
|
"loss": 1.5936, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3848314606741576e-05, |
|
"loss": 1.6068, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.37312734082397e-05, |
|
"loss": 1.6029, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.361423220973783e-05, |
|
"loss": 1.6374, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.349719101123596e-05, |
|
"loss": 1.6115, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3380149812734086e-05, |
|
"loss": 1.6287, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3263108614232215e-05, |
|
"loss": 1.5845, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.314606741573034e-05, |
|
"loss": 1.5682, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3029026217228467e-05, |
|
"loss": 1.5966, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.2911985018726596e-05, |
|
"loss": 1.6527, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.2794943820224725e-05, |
|
"loss": 1.5991, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.267790262172285e-05, |
|
"loss": 1.6472, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2560861423220976e-05, |
|
"loss": 1.5972, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.24438202247191e-05, |
|
"loss": 1.5871, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.232677902621723e-05, |
|
"loss": 1.5687, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.220973782771536e-05, |
|
"loss": 1.6235, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2092696629213486e-05, |
|
"loss": 1.6073, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.197565543071161e-05, |
|
"loss": 1.5895, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.185861423220974e-05, |
|
"loss": 1.6319, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.1741573033707866e-05, |
|
"loss": 1.5958, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.1624531835205996e-05, |
|
"loss": 1.6232, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.1507490636704125e-05, |
|
"loss": 1.6514, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.139044943820225e-05, |
|
"loss": 1.6175, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.1273408239700376e-05, |
|
"loss": 1.5959, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.1156367041198505e-05, |
|
"loss": 1.6223, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.1039325842696634e-05, |
|
"loss": 1.6289, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.092228464419476e-05, |
|
"loss": 1.6393, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.0805243445692886e-05, |
|
"loss": 1.5833, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.068820224719101e-05, |
|
"loss": 1.5563, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.057116104868914e-05, |
|
"loss": 1.6156, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.045411985018727e-05, |
|
"loss": 1.567, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0337078651685396e-05, |
|
"loss": 1.5583, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0220037453183518e-05, |
|
"loss": 1.6089, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0102996254681647e-05, |
|
"loss": 1.6113, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9985955056179776e-05, |
|
"loss": 1.6204, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9868913857677905e-05, |
|
"loss": 1.6012, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9751872659176034e-05, |
|
"loss": 1.61, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9634831460674157e-05, |
|
"loss": 1.6197, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9517790262172286e-05, |
|
"loss": 1.6477, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.940074906367041e-05, |
|
"loss": 1.6033, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.928370786516854e-05, |
|
"loss": 1.6398, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.916666666666667e-05, |
|
"loss": 1.5934, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.9049625468164792e-05, |
|
"loss": 1.6086, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.893258426966292e-05, |
|
"loss": 1.6349, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.881554307116105e-05, |
|
"loss": 1.5624, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.869850187265918e-05, |
|
"loss": 1.5814, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.8581460674157305e-05, |
|
"loss": 1.5955, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.846441947565543e-05, |
|
"loss": 1.6163, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8347378277153557e-05, |
|
"loss": 1.6423, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8230337078651686e-05, |
|
"loss": 1.5882, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8113295880149815e-05, |
|
"loss": 1.5747, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7996254681647944e-05, |
|
"loss": 1.6375, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7879213483146066e-05, |
|
"loss": 1.577, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7762172284644195e-05, |
|
"loss": 1.6285, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.7645131086142325e-05, |
|
"loss": 1.603, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.752808988764045e-05, |
|
"loss": 1.5859, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.741104868913858e-05, |
|
"loss": 1.5735, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7294007490636702e-05, |
|
"loss": 1.5936, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.717696629213483e-05, |
|
"loss": 1.5934, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.705992509363296e-05, |
|
"loss": 1.5972, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.694288389513109e-05, |
|
"loss": 1.6575, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6825842696629215e-05, |
|
"loss": 1.5937, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.670880149812734e-05, |
|
"loss": 1.5552, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6591760299625466e-05, |
|
"loss": 1.6175, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6474719101123595e-05, |
|
"loss": 1.58, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6357677902621724e-05, |
|
"loss": 1.6338, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6240636704119854e-05, |
|
"loss": 1.605, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6123595505617983e-05, |
|
"loss": 1.6046, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6006554307116105e-05, |
|
"loss": 1.6041, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5889513108614234e-05, |
|
"loss": 1.594, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.577247191011236e-05, |
|
"loss": 1.5991, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.565543071161049e-05, |
|
"loss": 1.6344, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5538389513108618e-05, |
|
"loss": 1.6152, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.542134831460674e-05, |
|
"loss": 1.5778, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.530430711610487e-05, |
|
"loss": 1.629, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5187265917603e-05, |
|
"loss": 1.5921, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5070224719101128e-05, |
|
"loss": 1.6092, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4953183520599254e-05, |
|
"loss": 1.6009, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.483614232209738e-05, |
|
"loss": 1.601, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4719101123595505e-05, |
|
"loss": 1.5833, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4602059925093634e-05, |
|
"loss": 1.5711, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.448501872659176e-05, |
|
"loss": 1.599, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.436797752808989e-05, |
|
"loss": 1.599, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4250936329588018e-05, |
|
"loss": 1.6161, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4133895131086144e-05, |
|
"loss": 1.5779, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.401685393258427e-05, |
|
"loss": 1.6316, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3899812734082395e-05, |
|
"loss": 1.5623, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3782771535580524e-05, |
|
"loss": 1.6113, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3665730337078653e-05, |
|
"loss": 1.5844, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.354868913857678e-05, |
|
"loss": 1.5771, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3431647940074908e-05, |
|
"loss": 1.5795, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3314606741573034e-05, |
|
"loss": 1.5784, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3197565543071163e-05, |
|
"loss": 1.6204, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.308052434456929e-05, |
|
"loss": 1.5829, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2963483146067415e-05, |
|
"loss": 1.5878, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2846441947565544e-05, |
|
"loss": 1.5838, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.272940074906367e-05, |
|
"loss": 1.6049, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.26123595505618e-05, |
|
"loss": 1.602, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2495318352059928e-05, |
|
"loss": 1.6357, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2378277153558053e-05, |
|
"loss": 1.5882, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2261235955056183e-05, |
|
"loss": 1.5904, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2144194756554308e-05, |
|
"loss": 1.6046, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2027153558052434e-05, |
|
"loss": 1.5691, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.1910112359550563e-05, |
|
"loss": 1.5727, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.179307116104869e-05, |
|
"loss": 1.5936, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1676029962546818e-05, |
|
"loss": 1.6017, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1558988764044947e-05, |
|
"loss": 1.6201, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1441947565543073e-05, |
|
"loss": 1.6262, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1324906367041202e-05, |
|
"loss": 1.5802, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1207865168539328e-05, |
|
"loss": 1.5608, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1090823970037453e-05, |
|
"loss": 1.6072, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0973782771535582e-05, |
|
"loss": 1.6122, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0856741573033708e-05, |
|
"loss": 1.6387, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0739700374531837e-05, |
|
"loss": 1.5944, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0622659176029963e-05, |
|
"loss": 1.623, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0505617977528092e-05, |
|
"loss": 1.5679, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0388576779026218e-05, |
|
"loss": 1.5833, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0271535580524344e-05, |
|
"loss": 1.5898, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0154494382022473e-05, |
|
"loss": 1.5832, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.00374531835206e-05, |
|
"loss": 1.6074, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.9920411985018728e-05, |
|
"loss": 1.5406, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9803370786516857e-05, |
|
"loss": 1.5898, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9686329588014982e-05, |
|
"loss": 1.6523, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.956928838951311e-05, |
|
"loss": 1.5475, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9452247191011237e-05, |
|
"loss": 1.611, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9335205992509363e-05, |
|
"loss": 1.644, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9218164794007492e-05, |
|
"loss": 1.6512, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9101123595505618e-05, |
|
"loss": 1.6181, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.8984082397003747e-05, |
|
"loss": 1.5943, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8867041198501873e-05, |
|
"loss": 1.5644, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 1.5931, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.863295880149813e-05, |
|
"loss": 1.5999, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8515917602996257e-05, |
|
"loss": 1.6389, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8398876404494382e-05, |
|
"loss": 1.6019, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8281835205992508e-05, |
|
"loss": 1.6034, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8164794007490637e-05, |
|
"loss": 1.6172, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8047752808988766e-05, |
|
"loss": 1.584, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7930711610486892e-05, |
|
"loss": 1.6288, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.781367041198502e-05, |
|
"loss": 1.6515, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7696629213483147e-05, |
|
"loss": 1.6043, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7579588014981273e-05, |
|
"loss": 1.6518, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.74625468164794e-05, |
|
"loss": 1.6164, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7345505617977527e-05, |
|
"loss": 1.6004, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7228464419475657e-05, |
|
"loss": 1.6178, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7111423220973782e-05, |
|
"loss": 1.624, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.699438202247191e-05, |
|
"loss": 1.5782, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.687734082397004e-05, |
|
"loss": 1.6205, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6760299625468166e-05, |
|
"loss": 1.5928, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6643258426966292e-05, |
|
"loss": 1.6625, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6526217228464418e-05, |
|
"loss": 1.5908, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6409176029962547e-05, |
|
"loss": 1.6132, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6292134831460676e-05, |
|
"loss": 1.6542, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.61750936329588e-05, |
|
"loss": 1.5836, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.605805243445693e-05, |
|
"loss": 1.581, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5941011235955056e-05, |
|
"loss": 1.6075, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5823970037453186e-05, |
|
"loss": 1.6246, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.570692883895131e-05, |
|
"loss": 1.589, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5589887640449437e-05, |
|
"loss": 1.5989, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5472846441947566e-05, |
|
"loss": 1.6443, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5355805243445692e-05, |
|
"loss": 1.6154, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5238764044943821e-05, |
|
"loss": 1.571, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5121722846441948e-05, |
|
"loss": 1.6231, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5004681647940074e-05, |
|
"loss": 1.595, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4887640449438203e-05, |
|
"loss": 1.6098, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4770599250936332e-05, |
|
"loss": 1.5654, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4653558052434458e-05, |
|
"loss": 1.6087, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4536516853932586e-05, |
|
"loss": 1.6169, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4419475655430711e-05, |
|
"loss": 1.5326, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.430243445692884e-05, |
|
"loss": 1.5839, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4185393258426968e-05, |
|
"loss": 1.6051, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4068352059925093e-05, |
|
"loss": 1.595, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3951310861423223e-05, |
|
"loss": 1.6497, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3834269662921348e-05, |
|
"loss": 1.5961, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3717228464419476e-05, |
|
"loss": 1.5774, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3600187265917605e-05, |
|
"loss": 1.5494, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.348314606741573e-05, |
|
"loss": 1.6287, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.336610486891386e-05, |
|
"loss": 1.5876, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3249063670411985e-05, |
|
"loss": 1.5899, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3132022471910113e-05, |
|
"loss": 1.636, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3014981273408242e-05, |
|
"loss": 1.5857, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2897940074906368e-05, |
|
"loss": 1.5766, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2780898876404495e-05, |
|
"loss": 1.5976, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2663857677902621e-05, |
|
"loss": 1.6176, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.254681647940075e-05, |
|
"loss": 1.5601, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2429775280898877e-05, |
|
"loss": 1.6138, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2312734082397003e-05, |
|
"loss": 1.5693, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2195692883895132e-05, |
|
"loss": 1.5975, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.207865168539326e-05, |
|
"loss": 1.5745, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1961610486891387e-05, |
|
"loss": 1.5805, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1844569288389513e-05, |
|
"loss": 1.5644, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.172752808988764e-05, |
|
"loss": 1.5996, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.161048689138577e-05, |
|
"loss": 1.5899, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1493445692883897e-05, |
|
"loss": 1.5999, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1376404494382022e-05, |
|
"loss": 1.5971, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.125936329588015e-05, |
|
"loss": 1.6169, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1142322097378277e-05, |
|
"loss": 1.6032, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1025280898876405e-05, |
|
"loss": 1.6395, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0908239700374532e-05, |
|
"loss": 1.5836, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.079119850187266e-05, |
|
"loss": 1.6348, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0674157303370787e-05, |
|
"loss": 1.6234, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0557116104868914e-05, |
|
"loss": 1.5759, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0440074906367042e-05, |
|
"loss": 1.5938, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.032303370786517e-05, |
|
"loss": 1.6115, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0205992509363297e-05, |
|
"loss": 1.597, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0088951310861424e-05, |
|
"loss": 1.6085, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.97191011235955e-06, |
|
"loss": 1.6232, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.854868913857679e-06, |
|
"loss": 1.5851, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.737827715355806e-06, |
|
"loss": 1.5964, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.620786516853934e-06, |
|
"loss": 1.6511, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.50374531835206e-06, |
|
"loss": 1.591, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.386704119850187e-06, |
|
"loss": 1.6433, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.269662921348316e-06, |
|
"loss": 1.5838, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.152621722846442e-06, |
|
"loss": 1.5905, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.03558052434457e-06, |
|
"loss": 1.5938, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.918539325842697e-06, |
|
"loss": 1.5849, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.801498127340826e-06, |
|
"loss": 1.625, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.684456928838951e-06, |
|
"loss": 1.5588, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.567415730337079e-06, |
|
"loss": 1.6157, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.450374531835206e-06, |
|
"loss": 1.6203, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 1.5936, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.216292134831461e-06, |
|
"loss": 1.6212, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.099250936329589e-06, |
|
"loss": 1.5851, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.982209737827716e-06, |
|
"loss": 1.6167, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.865168539325843e-06, |
|
"loss": 1.6049, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.748127340823969e-06, |
|
"loss": 1.5873, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.631086142322098e-06, |
|
"loss": 1.5844, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.514044943820226e-06, |
|
"loss": 1.596, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.397003745318352e-06, |
|
"loss": 1.5847, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.27996254681648e-06, |
|
"loss": 1.6052, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.162921348314606e-06, |
|
"loss": 1.6181, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.045880149812735e-06, |
|
"loss": 1.6075, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.928838951310862e-06, |
|
"loss": 1.6222, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.811797752808989e-06, |
|
"loss": 1.6244, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.694756554307116e-06, |
|
"loss": 1.6182, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.577715355805243e-06, |
|
"loss": 1.6025, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.460674157303372e-06, |
|
"loss": 1.5655, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.343632958801499e-06, |
|
"loss": 1.6087, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.226591760299626e-06, |
|
"loss": 1.5929, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.109550561797753e-06, |
|
"loss": 1.5857, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.9925093632958805e-06, |
|
"loss": 1.6383, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.875468164794008e-06, |
|
"loss": 1.5726, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.758426966292135e-06, |
|
"loss": 1.6053, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.641385767790263e-06, |
|
"loss": 1.5869, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.52434456928839e-06, |
|
"loss": 1.6706, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.407303370786517e-06, |
|
"loss": 1.6114, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.290262172284644e-06, |
|
"loss": 1.5738, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.1732209737827716e-06, |
|
"loss": 1.593, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.056179775280899e-06, |
|
"loss": 1.5854, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.939138576779026e-06, |
|
"loss": 1.5302, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.822097378277154e-06, |
|
"loss": 1.596, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.705056179775281e-06, |
|
"loss": 1.5874, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.588014981273409e-06, |
|
"loss": 1.6144, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.470973782771535e-06, |
|
"loss": 1.5681, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.3539325842696635e-06, |
|
"loss": 1.5582, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.23689138576779e-06, |
|
"loss": 1.5442, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.1198501872659175e-06, |
|
"loss": 1.5907, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.002808988764045e-06, |
|
"loss": 1.5843, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.885767790262172e-06, |
|
"loss": 1.5797, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.7687265917603e-06, |
|
"loss": 1.5955, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.651685393258427e-06, |
|
"loss": 1.6296, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5346441947565546e-06, |
|
"loss": 1.567, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.417602996254682e-06, |
|
"loss": 1.6118, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.300561797752809e-06, |
|
"loss": 1.5625, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1835205992509364e-06, |
|
"loss": 1.5886, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4272, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 2.527467262873436e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |