{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.8612303290414878,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.988078206962327e-05,
      "loss": 1.7518,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.976156413924655e-05,
      "loss": 1.7804,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.964234620886982e-05,
      "loss": 1.727,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9523128278493086e-05,
      "loss": 1.6974,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.940391034811636e-05,
      "loss": 1.6927,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.928469241773963e-05,
      "loss": 1.6302,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.91654744873629e-05,
      "loss": 1.6878,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.904625655698618e-05,
      "loss": 1.6706,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8927038626609446e-05,
      "loss": 1.6709,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8807820696232716e-05,
      "loss": 1.6876,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.868860276585599e-05,
      "loss": 1.6377,
      "step": 110
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.856938483547926e-05,
      "loss": 1.6348,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.845016690510253e-05,
      "loss": 1.6682,
      "step": 130
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8330948974725806e-05,
      "loss": 1.632,
      "step": 140
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8211731044349076e-05,
      "loss": 1.663,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8092513113972345e-05,
      "loss": 1.6638,
      "step": 160
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7973295183595614e-05,
      "loss": 1.5964,
      "step": 170
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.785407725321889e-05,
      "loss": 1.6738,
      "step": 180
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.773485932284216e-05,
      "loss": 1.6489,
      "step": 190
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.761564139246543e-05,
      "loss": 1.6609,
      "step": 200
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.74964234620887e-05,
      "loss": 1.6356,
      "step": 210
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7377205531711974e-05,
      "loss": 1.6409,
      "step": 220
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7257987601335244e-05,
      "loss": 1.6418,
      "step": 230
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.713876967095851e-05,
      "loss": 1.6824,
      "step": 240
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.701955174058178e-05,
      "loss": 1.6345,
      "step": 250
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.690033381020505e-05,
      "loss": 1.6219,
      "step": 260
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.678111587982833e-05,
      "loss": 1.6633,
      "step": 270
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.66618979494516e-05,
      "loss": 1.6067,
      "step": 280
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6542680019074866e-05,
      "loss": 1.653,
      "step": 290
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.642346208869814e-05,
      "loss": 1.6371,
      "step": 300
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.630424415832141e-05,
      "loss": 1.6492,
      "step": 310
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.618502622794468e-05,
      "loss": 1.6222,
      "step": 320
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.606580829756796e-05,
      "loss": 1.6677,
      "step": 330
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5946590367191226e-05,
      "loss": 1.595,
      "step": 340
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5827372436814496e-05,
      "loss": 1.6309,
      "step": 350
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.570815450643777e-05,
      "loss": 1.6098,
      "step": 360
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.558893657606104e-05,
      "loss": 1.6175,
      "step": 370
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.546971864568431e-05,
      "loss": 1.6127,
      "step": 380
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5350500715307586e-05,
      "loss": 1.6326,
      "step": 390
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5231282784930856e-05,
      "loss": 1.626,
      "step": 400
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5112064854554125e-05,
      "loss": 1.5784,
      "step": 410
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.49928469241774e-05,
      "loss": 1.6449,
      "step": 420
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.487362899380067e-05,
      "loss": 1.619,
      "step": 430
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.475441106342394e-05,
      "loss": 1.636,
      "step": 440
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4635193133047216e-05,
      "loss": 1.6003,
      "step": 450
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4515975202670485e-05,
      "loss": 1.623,
      "step": 460
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4396757272293754e-05,
      "loss": 1.6109,
      "step": 470
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4277539341917024e-05,
      "loss": 1.656,
      "step": 480
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.41583214115403e-05,
      "loss": 1.6107,
      "step": 490
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.403910348116357e-05,
      "loss": 1.6136,
      "step": 500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.391988555078684e-05,
      "loss": 1.6333,
      "step": 510
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3800667620410114e-05,
      "loss": 1.6259,
      "step": 520
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3681449690033384e-05,
      "loss": 1.6519,
      "step": 530
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.356223175965665e-05,
      "loss": 1.6325,
      "step": 540
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.344301382927993e-05,
      "loss": 1.632,
      "step": 550
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.33237958989032e-05,
      "loss": 1.6382,
      "step": 560
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.320457796852647e-05,
      "loss": 1.6114,
      "step": 570
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3085360038149744e-05,
      "loss": 1.5911,
      "step": 580
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.296614210777301e-05,
      "loss": 1.629,
      "step": 590
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.284692417739628e-05,
      "loss": 1.6607,
      "step": 600
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.272770624701956e-05,
      "loss": 1.6122,
      "step": 610
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.260848831664283e-05,
      "loss": 1.5969,
      "step": 620
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.24892703862661e-05,
      "loss": 1.6497,
      "step": 630
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.237005245588937e-05,
      "loss": 1.6098,
      "step": 640
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.225083452551264e-05,
      "loss": 1.6341,
      "step": 650
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.213161659513591e-05,
      "loss": 1.6244,
      "step": 660
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.201239866475918e-05,
      "loss": 1.6425,
      "step": 670
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.189318073438245e-05,
      "loss": 1.6436,
      "step": 680
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1773962804005726e-05,
      "loss": 1.6319,
      "step": 690
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1654744873628996e-05,
      "loss": 1.6273,
      "step": 700
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1535526943252265e-05,
      "loss": 1.6348,
      "step": 710
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1416309012875534e-05,
      "loss": 1.6447,
      "step": 720
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.129709108249881e-05,
      "loss": 1.637,
      "step": 730
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.117787315212208e-05,
      "loss": 1.603,
      "step": 740
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.105865522174535e-05,
      "loss": 1.6257,
      "step": 750
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0939437291368625e-05,
      "loss": 1.6277,
      "step": 760
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0820219360991894e-05,
      "loss": 1.6331,
      "step": 770
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0701001430615164e-05,
      "loss": 1.6434,
      "step": 780
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.058178350023844e-05,
      "loss": 1.6513,
      "step": 790
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.046256556986171e-05,
      "loss": 1.6015,
      "step": 800
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.034334763948498e-05,
      "loss": 1.6598,
      "step": 810
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.022412970910825e-05,
      "loss": 1.6007,
      "step": 820
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0104911778731524e-05,
      "loss": 1.5911,
      "step": 830
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.998569384835479e-05,
      "loss": 1.637,
      "step": 840
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.986647591797806e-05,
      "loss": 1.572,
      "step": 850
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.974725798760134e-05,
      "loss": 1.6409,
      "step": 860
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.962804005722461e-05,
      "loss": 1.641,
      "step": 870
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.950882212684788e-05,
      "loss": 1.6375,
      "step": 880
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.938960419647115e-05,
      "loss": 1.6491,
      "step": 890
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.927038626609442e-05,
      "loss": 1.6672,
      "step": 900
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.915116833571769e-05,
      "loss": 1.6263,
      "step": 910
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.903195040534097e-05,
      "loss": 1.5962,
      "step": 920
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.891273247496424e-05,
      "loss": 1.6026,
      "step": 930
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8793514544587506e-05,
      "loss": 1.645,
      "step": 940
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.867429661421078e-05,
      "loss": 1.6275,
      "step": 950
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.855507868383405e-05,
      "loss": 1.6154,
      "step": 960
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.843586075345732e-05,
      "loss": 1.6322,
      "step": 970
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.83166428230806e-05,
      "loss": 1.6122,
      "step": 980
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8197424892703866e-05,
      "loss": 1.6158,
      "step": 990
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8078206962327136e-05,
      "loss": 1.5995,
      "step": 1000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.795898903195041e-05,
      "loss": 1.643,
      "step": 1010
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.783977110157368e-05,
      "loss": 1.6384,
      "step": 1020
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.772055317119695e-05,
      "loss": 1.6193,
      "step": 1030
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7601335240820226e-05,
      "loss": 1.5834,
      "step": 1040
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7482117310443496e-05,
      "loss": 1.5957,
      "step": 1050
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7362899380066765e-05,
      "loss": 1.6524,
      "step": 1060
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.724368144969004e-05,
      "loss": 1.6312,
      "step": 1070
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.712446351931331e-05,
      "loss": 1.6618,
      "step": 1080
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.700524558893658e-05,
      "loss": 1.6468,
      "step": 1090
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.688602765855985e-05,
      "loss": 1.6162,
      "step": 1100
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.676680972818312e-05,
      "loss": 1.6427,
      "step": 1110
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6647591797806394e-05,
      "loss": 1.6342,
      "step": 1120
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6528373867429664e-05,
      "loss": 1.6279,
      "step": 1130
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.640915593705293e-05,
      "loss": 1.6184,
      "step": 1140
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.62899380066762e-05,
      "loss": 1.639,
      "step": 1150
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.617072007629947e-05,
      "loss": 1.6186,
      "step": 1160
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.605150214592275e-05,
      "loss": 1.6145,
      "step": 1170
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.593228421554602e-05,
      "loss": 1.6153,
      "step": 1180
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5813066285169286e-05,
      "loss": 1.6699,
      "step": 1190
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.569384835479256e-05,
      "loss": 1.6411,
      "step": 1200
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.557463042441583e-05,
      "loss": 1.6127,
      "step": 1210
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.54554124940391e-05,
      "loss": 1.6248,
      "step": 1220
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.533619456366238e-05,
      "loss": 1.6298,
      "step": 1230
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5216976633285646e-05,
      "loss": 1.6356,
      "step": 1240
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5097758702908915e-05,
      "loss": 1.6054,
      "step": 1250
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.497854077253219e-05,
      "loss": 1.6315,
      "step": 1260
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.485932284215546e-05,
      "loss": 1.6579,
      "step": 1270
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.474010491177873e-05,
      "loss": 1.6055,
      "step": 1280
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4620886981402006e-05,
      "loss": 1.669,
      "step": 1290
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4501669051025275e-05,
      "loss": 1.6502,
      "step": 1300
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4382451120648545e-05,
      "loss": 1.5906,
      "step": 1310
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.426323319027182e-05,
      "loss": 1.6137,
      "step": 1320
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.414401525989509e-05,
      "loss": 1.6185,
      "step": 1330
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.402479732951836e-05,
      "loss": 1.6337,
      "step": 1340
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3905579399141636e-05,
      "loss": 1.644,
      "step": 1350
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3786361468764905e-05,
      "loss": 1.6663,
      "step": 1360
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3667143538388174e-05,
      "loss": 1.6261,
      "step": 1370
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.354792560801145e-05,
      "loss": 1.5996,
      "step": 1380
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.342870767763472e-05,
      "loss": 1.6521,
      "step": 1390
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.330948974725799e-05,
      "loss": 1.6202,
      "step": 1400
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3190271816881265e-05,
      "loss": 1.6083,
      "step": 1410
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3071053886504534e-05,
      "loss": 1.6344,
      "step": 1420
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.2951835956127803e-05,
      "loss": 1.5612,
      "step": 1430
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.283261802575107e-05,
      "loss": 1.661,
      "step": 1440
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.271340009537435e-05,
      "loss": 1.6015,
      "step": 1450
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.259418216499762e-05,
      "loss": 1.6069,
      "step": 1460
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.247496423462089e-05,
      "loss": 1.6421,
      "step": 1470
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2355746304244164e-05,
      "loss": 1.6506,
      "step": 1480
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.223652837386743e-05,
      "loss": 1.6048,
      "step": 1490
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.21173104434907e-05,
      "loss": 1.6277,
      "step": 1500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.199809251311398e-05,
      "loss": 1.6266,
      "step": 1510
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.187887458273725e-05,
      "loss": 1.6134,
      "step": 1520
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.175965665236052e-05,
      "loss": 1.6256,
      "step": 1530
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1640438721983786e-05,
      "loss": 1.5966,
      "step": 1540
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.152122079160706e-05,
      "loss": 1.6091,
      "step": 1550
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.140200286123033e-05,
      "loss": 1.6,
      "step": 1560
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.12827849308536e-05,
      "loss": 1.5905,
      "step": 1570
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.116356700047687e-05,
      "loss": 1.5827,
      "step": 1580
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.104434907010014e-05,
      "loss": 1.6267,
      "step": 1590
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.0925131139723415e-05,
      "loss": 1.5794,
      "step": 1600
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0805913209346685e-05,
      "loss": 1.6232,
      "step": 1610
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0686695278969954e-05,
      "loss": 1.6042,
      "step": 1620
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.056747734859323e-05,
      "loss": 1.6362,
      "step": 1630
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0448259418216503e-05,
      "loss": 1.6272,
      "step": 1640
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0329041487839772e-05,
      "loss": 1.6534,
      "step": 1650
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0209823557463045e-05,
      "loss": 1.6036,
      "step": 1660
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0090605627086314e-05,
      "loss": 1.5962,
      "step": 1670
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9971387696709587e-05,
      "loss": 1.6082,
      "step": 1680
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.985216976633286e-05,
      "loss": 1.635,
      "step": 1690
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.973295183595613e-05,
      "loss": 1.6109,
      "step": 1700
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9613733905579398e-05,
      "loss": 1.6131,
      "step": 1710
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9494515975202674e-05,
      "loss": 1.6101,
      "step": 1720
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9375298044825943e-05,
      "loss": 1.6222,
      "step": 1730
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9256080114449213e-05,
      "loss": 1.6028,
      "step": 1740
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9136862184072482e-05,
      "loss": 1.6352,
      "step": 1750
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.9017644253695758e-05,
      "loss": 1.6593,
      "step": 1760
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8898426323319027e-05,
      "loss": 1.6328,
      "step": 1770
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8779208392942297e-05,
      "loss": 1.6306,
      "step": 1780
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8659990462565573e-05,
      "loss": 1.6306,
      "step": 1790
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8540772532188842e-05,
      "loss": 1.6451,
      "step": 1800
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.842155460181211e-05,
      "loss": 1.6239,
      "step": 1810
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8302336671435387e-05,
      "loss": 1.6662,
      "step": 1820
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8183118741058657e-05,
      "loss": 1.5954,
      "step": 1830
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8063900810681926e-05,
      "loss": 1.6209,
      "step": 1840
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7944682880305202e-05,
      "loss": 1.6099,
      "step": 1850
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.782546494992847e-05,
      "loss": 1.6162,
      "step": 1860
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.770624701955174e-05,
      "loss": 1.6024,
      "step": 1870
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7587029089175013e-05,
      "loss": 1.6022,
      "step": 1880
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7467811158798286e-05,
      "loss": 1.6392,
      "step": 1890
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7348593228421555e-05,
      "loss": 1.6093,
      "step": 1900
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7229375298044828e-05,
      "loss": 1.6115,
      "step": 1910
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7110157367668097e-05,
      "loss": 1.6599,
      "step": 1920
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.6990939437291367e-05,
      "loss": 1.6294,
      "step": 1930
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6871721506914643e-05,
      "loss": 1.5933,
      "step": 1940
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6752503576537912e-05,
      "loss": 1.5787,
      "step": 1950
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.663328564616118e-05,
      "loss": 1.6136,
      "step": 1960
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6514067715784457e-05,
      "loss": 1.5836,
      "step": 1970
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6394849785407727e-05,
      "loss": 1.6532,
      "step": 1980
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6275631855030996e-05,
      "loss": 1.6263,
      "step": 1990
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6156413924654272e-05,
      "loss": 1.5703,
      "step": 2000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.603719599427754e-05,
      "loss": 1.6016,
      "step": 2010
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.591797806390081e-05,
      "loss": 1.6305,
      "step": 2020
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5798760133524087e-05,
      "loss": 1.5717,
      "step": 2030
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5679542203147356e-05,
      "loss": 1.6152,
      "step": 2040
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5560324272770625e-05,
      "loss": 1.6268,
      "step": 2050
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.54411063423939e-05,
      "loss": 1.5941,
      "step": 2060
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.532188841201717e-05,
      "loss": 1.5952,
      "step": 2070
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.520267048164044e-05,
      "loss": 1.6374,
      "step": 2080
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.508345255126371e-05,
      "loss": 1.5663,
      "step": 2090
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.4964234620886985e-05,
      "loss": 1.6751,
      "step": 2100
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4845016690510255e-05,
      "loss": 1.6235,
      "step": 2110
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4725798760133524e-05,
      "loss": 1.648,
      "step": 2120
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4606580829756797e-05,
      "loss": 1.6217,
      "step": 2130
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4487362899380066e-05,
      "loss": 1.6476,
      "step": 2140
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.436814496900334e-05,
      "loss": 1.6202,
      "step": 2150
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4248927038626608e-05,
      "loss": 1.6038,
      "step": 2160
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.412970910824988e-05,
      "loss": 1.5808,
      "step": 2170
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.4010491177873153e-05,
      "loss": 1.6445,
      "step": 2180
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3891273247496423e-05,
      "loss": 1.628,
      "step": 2190
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3772055317119695e-05,
      "loss": 1.5915,
      "step": 2200
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3652837386742968e-05,
      "loss": 1.6305,
      "step": 2210
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3533619456366237e-05,
      "loss": 1.6429,
      "step": 2220
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.341440152598951e-05,
      "loss": 1.6508,
      "step": 2230
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3295183595612783e-05,
      "loss": 1.6106,
      "step": 2240
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3175965665236052e-05,
      "loss": 1.6121,
      "step": 2250
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3056747734859325e-05,
      "loss": 1.6539,
      "step": 2260
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.2937529804482597e-05,
      "loss": 1.6035,
      "step": 2270
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2818311874105867e-05,
      "loss": 1.6209,
      "step": 2280
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.269909394372914e-05,
      "loss": 1.6449,
      "step": 2290
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2579876013352412e-05,
      "loss": 1.6389,
      "step": 2300
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.246065808297568e-05,
      "loss": 1.6414,
      "step": 2310
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2341440152598954e-05,
      "loss": 1.5901,
      "step": 2320
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 1.6464,
      "step": 2330
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2103004291845496e-05,
      "loss": 1.6014,
      "step": 2340
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1983786361468765e-05,
      "loss": 1.6188,
      "step": 2350
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1864568431092035e-05,
      "loss": 1.611,
      "step": 2360
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1745350500715307e-05,
      "loss": 1.5914,
      "step": 2370
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.162613257033858e-05,
      "loss": 1.6256,
      "step": 2380
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.150691463996185e-05,
      "loss": 1.6341,
      "step": 2390
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1387696709585122e-05,
      "loss": 1.6195,
      "step": 2400
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1268478779208395e-05,
      "loss": 1.63,
      "step": 2410
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1149260848831664e-05,
      "loss": 1.6151,
      "step": 2420
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.1030042918454937e-05,
      "loss": 1.6279,
      "step": 2430
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.091082498807821e-05,
      "loss": 1.6005,
      "step": 2440
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.079160705770148e-05,
      "loss": 1.6192,
      "step": 2450
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.067238912732475e-05,
      "loss": 1.6034,
      "step": 2460
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.055317119694802e-05,
      "loss": 1.6484,
      "step": 2470
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0433953266571293e-05,
      "loss": 1.5989,
      "step": 2480
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0314735336194566e-05,
      "loss": 1.6299,
      "step": 2490
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0195517405817835e-05,
      "loss": 1.5793,
      "step": 2500
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.0076299475441108e-05,
      "loss": 1.5988,
      "step": 2510
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.995708154506438e-05,
      "loss": 1.6183,
      "step": 2520
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.983786361468765e-05,
      "loss": 1.6151,
      "step": 2530
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9718645684310923e-05,
      "loss": 1.6206,
      "step": 2540
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9599427753934195e-05,
      "loss": 1.5981,
      "step": 2550
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9480209823557465e-05,
      "loss": 1.6269,
      "step": 2560
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9360991893180734e-05,
      "loss": 1.6321,
      "step": 2570
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9241773962804007e-05,
      "loss": 1.6094,
      "step": 2580
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9122556032427276e-05,
      "loss": 1.6576,
      "step": 2590
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.900333810205055e-05,
      "loss": 1.6178,
      "step": 2600
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.888412017167382e-05,
      "loss": 1.5908,
      "step": 2610
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.876490224129709e-05,
      "loss": 1.6156,
      "step": 2620
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8645684310920363e-05,
      "loss": 1.6218,
      "step": 2630
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8526466380543633e-05,
      "loss": 1.6309,
      "step": 2640
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8407248450166905e-05,
      "loss": 1.6382,
      "step": 2650
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8288030519790178e-05,
      "loss": 1.5685,
      "step": 2660
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8168812589413447e-05,
      "loss": 1.6139,
      "step": 2670
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.804959465903672e-05,
      "loss": 1.6174,
      "step": 2680
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.7930376728659993e-05,
      "loss": 1.6433,
      "step": 2690
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7811158798283262e-05,
      "loss": 1.6356,
      "step": 2700
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7691940867906535e-05,
      "loss": 1.6277,
      "step": 2710
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7572722937529807e-05,
      "loss": 1.621,
      "step": 2720
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7453505007153077e-05,
      "loss": 1.6146,
      "step": 2730
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.733428707677635e-05,
      "loss": 1.61,
      "step": 2740
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7215069146399622e-05,
      "loss": 1.6081,
      "step": 2750
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.709585121602289e-05,
      "loss": 1.6355,
      "step": 2760
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6976633285646164e-05,
      "loss": 1.6074,
      "step": 2770
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6857415355269433e-05,
      "loss": 1.6287,
      "step": 2780
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6738197424892706e-05,
      "loss": 1.6176,
      "step": 2790
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6618979494515975e-05,
      "loss": 1.6046,
      "step": 2800
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6499761564139244e-05,
      "loss": 1.5707,
      "step": 2810
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6380543633762517e-05,
      "loss": 1.5756,
      "step": 2820
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.626132570338579e-05,
      "loss": 1.6575,
      "step": 2830
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.614210777300906e-05,
      "loss": 1.6337,
      "step": 2840
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6022889842632332e-05,
      "loss": 1.5799,
      "step": 2850
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5903671912255605e-05,
      "loss": 1.597,
      "step": 2860
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5784453981878874e-05,
      "loss": 1.601,
      "step": 2870
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5665236051502147e-05,
      "loss": 1.6362,
      "step": 2880
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.554601812112542e-05,
      "loss": 1.6311,
      "step": 2890
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.542680019074869e-05,
      "loss": 1.6315,
      "step": 2900
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.530758226037196e-05,
      "loss": 1.6036,
      "step": 2910
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5188364329995234e-05,
      "loss": 1.6067,
      "step": 2920
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5069146399618503e-05,
      "loss": 1.6093,
      "step": 2930
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4949928469241776e-05,
      "loss": 1.5882,
      "step": 2940
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4830710538865045e-05,
      "loss": 1.6175,
      "step": 2950
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4711492608488318e-05,
      "loss": 1.6685,
      "step": 2960
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4592274678111589e-05,
      "loss": 1.6059,
      "step": 2970
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4473056747734858e-05,
      "loss": 1.6274,
      "step": 2980
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.435383881735813e-05,
      "loss": 1.5816,
      "step": 2990
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4234620886981404e-05,
      "loss": 1.61,
      "step": 3000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4115402956604673e-05,
      "loss": 1.6236,
      "step": 3010
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3996185026227945e-05,
      "loss": 1.5497,
      "step": 3020
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3876967095851218e-05,
      "loss": 1.672,
      "step": 3030
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3757749165474487e-05,
      "loss": 1.6228,
      "step": 3040
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.363853123509776e-05,
      "loss": 1.597,
      "step": 3050
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3519313304721031e-05,
      "loss": 1.6403,
      "step": 3060
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3400095374344302e-05,
      "loss": 1.6139,
      "step": 3070
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3280877443967573e-05,
      "loss": 1.6012,
      "step": 3080
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3161659513590846e-05,
      "loss": 1.6226,
      "step": 3090
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3042441583214115e-05,
      "loss": 1.5783,
      "step": 3100
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2923223652837388e-05,
      "loss": 1.6102,
      "step": 3110
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2804005722460657e-05,
      "loss": 1.5804,
      "step": 3120
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.268478779208393e-05,
      "loss": 1.5914,
      "step": 3130
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2565569861707202e-05,
      "loss": 1.6024,
      "step": 3140
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2446351931330473e-05,
      "loss": 1.6067,
      "step": 3150
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2327134000953744e-05,
      "loss": 1.5699,
      "step": 3160
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2207916070577015e-05,
      "loss": 1.6144,
      "step": 3170
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2088698140200286e-05,
      "loss": 1.5885,
      "step": 3180
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1969480209823557e-05,
      "loss": 1.6316,
      "step": 3190
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1850262279446828e-05,
      "loss": 1.6166,
      "step": 3200
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1731044349070101e-05,
      "loss": 1.6275,
      "step": 3210
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1611826418693372e-05,
      "loss": 1.6502,
      "step": 3220
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1492608488316643e-05,
      "loss": 1.5856,
      "step": 3230
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1373390557939914e-05,
      "loss": 1.6386,
      "step": 3240
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1254172627563187e-05,
      "loss": 1.5871,
      "step": 3250
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1134954697186458e-05,
      "loss": 1.604,
      "step": 3260
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1015736766809729e-05,
      "loss": 1.6092,
      "step": 3270
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0896518836433001e-05,
      "loss": 1.5899,
      "step": 3280
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.077730090605627e-05,
      "loss": 1.5938,
      "step": 3290
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0658082975679542e-05,
      "loss": 1.6361,
      "step": 3300
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0538865045302813e-05,
      "loss": 1.575,
      "step": 3310
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0419647114926085e-05,
      "loss": 1.6119,
      "step": 3320
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0300429184549356e-05,
      "loss": 1.6057,
      "step": 3330
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0181211254172627e-05,
      "loss": 1.6396,
      "step": 3340
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.00619933237959e-05,
      "loss": 1.5937,
      "step": 3350
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.942775393419171e-06,
      "loss": 1.645,
      "step": 3360
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.823557463042442e-06,
      "loss": 1.6124,
      "step": 3370
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.704339532665713e-06,
      "loss": 1.5902,
      "step": 3380
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.585121602288986e-06,
      "loss": 1.6414,
      "step": 3390
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.465903671912257e-06,
      "loss": 1.6385,
      "step": 3400
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.346685741535526e-06,
      "loss": 1.5788,
      "step": 3410
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.227467811158799e-06,
      "loss": 1.6273,
      "step": 3420
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.10824988078207e-06,
      "loss": 1.5927,
      "step": 3430
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.98903195040534e-06,
      "loss": 1.595,
      "step": 3440
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.869814020028613e-06,
      "loss": 1.6533,
      "step": 3450
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.750596089651884e-06,
      "loss": 1.6238,
      "step": 3460
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.631378159275155e-06,
      "loss": 1.6202,
      "step": 3470
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.512160228898426e-06,
      "loss": 1.5734,
      "step": 3480
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.392942298521699e-06,
      "loss": 1.6395,
      "step": 3490
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.27372436814497e-06,
      "loss": 1.6079,
      "step": 3500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.154506437768241e-06,
      "loss": 1.5643,
      "step": 3510
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.035288507391512e-06,
      "loss": 1.6115,
      "step": 3520
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.916070577014783e-06,
      "loss": 1.5857,
      "step": 3530
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.796852646638054e-06,
      "loss": 1.6416,
      "step": 3540
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.677634716261325e-06,
      "loss": 1.5686,
      "step": 3550
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.558416785884598e-06,
      "loss": 1.5817,
      "step": 3560
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.439198855507869e-06,
      "loss": 1.6246,
      "step": 3570
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.31998092513114e-06,
      "loss": 1.6224,
      "step": 3580
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.2007629947544116e-06,
      "loss": 1.6275,
      "step": 3590
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.0815450643776825e-06,
      "loss": 1.6099,
      "step": 3600
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.9623271340009535e-06,
      "loss": 1.5909,
      "step": 3610
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.8431092036242245e-06,
      "loss": 1.6144,
      "step": 3620
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.723891273247497e-06,
      "loss": 1.6144,
      "step": 3630
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.604673342870768e-06,
      "loss": 1.5957,
      "step": 3640
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.485455412494039e-06,
      "loss": 1.6161,
      "step": 3650
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.366237482117311e-06,
      "loss": 1.658,
      "step": 3660
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.247019551740582e-06,
      "loss": 1.6356,
      "step": 3670
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.127801621363853e-06,
      "loss": 1.6253,
      "step": 3680
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.008583690987125e-06,
      "loss": 1.5904,
      "step": 3690
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.889365760610397e-06,
      "loss": 1.628,
      "step": 3700
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.770147830233668e-06,
      "loss": 1.6204,
      "step": 3710
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.650929899856939e-06,
      "loss": 1.5816,
      "step": 3720
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.53171196948021e-06,
      "loss": 1.6178,
      "step": 3730
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.4124940391034815e-06,
      "loss": 1.6105,
      "step": 3740
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.293276108726753e-06,
      "loss": 1.6179,
      "step": 3750
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.174058178350024e-06,
      "loss": 1.5802,
      "step": 3760
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.054840247973295e-06,
      "loss": 1.6731,
      "step": 3770
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.935622317596566e-06,
      "loss": 1.6021,
      "step": 3780
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.816404387219838e-06,
      "loss": 1.599,
      "step": 3790
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.697186456843109e-06,
      "loss": 1.6066,
      "step": 3800
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.577968526466381e-06,
      "loss": 1.6071,
      "step": 3810
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.458750596089652e-06,
      "loss": 1.5927,
      "step": 3820
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.339532665712923e-06,
      "loss": 1.5765,
      "step": 3830
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.220314735336195e-06,
      "loss": 1.6004,
      "step": 3840
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.101096804959466e-06,
      "loss": 1.6267,
      "step": 3850
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.981878874582738e-06,
      "loss": 1.5983,
      "step": 3860
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.8626609442060095e-06,
      "loss": 1.6246,
      "step": 3870
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.74344301382928e-06,
      "loss": 1.5923,
      "step": 3880
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6242250834525515e-06,
      "loss": 1.6262,
      "step": 3890
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.5050071530758225e-06,
      "loss": 1.5887,
      "step": 3900
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.3857892226990943e-06,
      "loss": 1.637,
      "step": 3910
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.2665712923223653e-06,
      "loss": 1.6374,
      "step": 3920
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1473533619456367e-06,
      "loss": 1.6502,
      "step": 3930
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.028135431568908e-06,
      "loss": 1.5977,
      "step": 3940
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.908917501192179e-06,
      "loss": 1.6352,
      "step": 3950
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.789699570815451e-06,
      "loss": 1.6112,
      "step": 3960
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6704816404387224e-06,
      "loss": 1.6442,
      "step": 3970
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.5512637100619934e-06,
      "loss": 1.6148,
      "step": 3980
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.432045779685265e-06,
      "loss": 1.6348,
      "step": 3990
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.3128278493085362e-06,
      "loss": 1.5957,
      "step": 4000
    }
  ],
  "logging_steps": 10,
  "max_steps": 4194,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.527783275284398e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}