{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 199995,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9874996874921875e-05,
      "loss": 3.2939,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.974999374984375e-05,
      "loss": 3.0017,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.962499062476562e-05,
      "loss": 2.8608,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.949998749968749e-05,
      "loss": 2.7653,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.937498437460937e-05,
      "loss": 2.681,
      "step": 2500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.924998124953124e-05,
      "loss": 2.6364,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9124978124453116e-05,
      "loss": 2.5911,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.899997499937499e-05,
      "loss": 2.5177,
      "step": 4000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.887497187429686e-05,
      "loss": 2.5272,
      "step": 4500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.874996874921873e-05,
      "loss": 2.4444,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8624965624140605e-05,
      "loss": 2.4071,
      "step": 5500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.849996249906248e-05,
      "loss": 2.4294,
      "step": 6000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.837495937398435e-05,
      "loss": 2.3753,
      "step": 6500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.824995624890622e-05,
      "loss": 2.3464,
      "step": 7000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.81249531238281e-05,
      "loss": 2.3459,
      "step": 7500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7999949998749974e-05,
      "loss": 2.3276,
      "step": 8000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7874946873671846e-05,
      "loss": 2.2796,
      "step": 8500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.774994374859372e-05,
      "loss": 2.3016,
      "step": 9000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.762494062351559e-05,
      "loss": 2.2536,
      "step": 9500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.749993749843746e-05,
      "loss": 2.2602,
      "step": 10000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7374934373359335e-05,
      "loss": 2.2181,
      "step": 10500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.724993124828121e-05,
      "loss": 2.2444,
      "step": 11000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.712492812320308e-05,
      "loss": 2.2094,
      "step": 11500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.699992499812495e-05,
      "loss": 2.1863,
      "step": 12000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.687492187304683e-05,
      "loss": 2.1926,
      "step": 12500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6749918747968704e-05,
      "loss": 2.1556,
      "step": 13000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6624915622890576e-05,
      "loss": 2.1883,
      "step": 13500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.649991249781245e-05,
      "loss": 2.1666,
      "step": 14000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.637490937273432e-05,
      "loss": 2.1424,
      "step": 14500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.624990624765619e-05,
      "loss": 2.1152,
      "step": 15000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6124903122578066e-05,
      "loss": 2.1066,
      "step": 15500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.599989999749994e-05,
      "loss": 2.1317,
      "step": 16000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.587489687242181e-05,
      "loss": 2.1149,
      "step": 16500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.574989374734369e-05,
      "loss": 2.0706,
      "step": 17000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.562489062226556e-05,
      "loss": 2.0609,
      "step": 17500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5499887497187434e-05,
      "loss": 2.0769,
      "step": 18000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5374884372109307e-05,
      "loss": 2.0559,
      "step": 18500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.524988124703118e-05,
      "loss": 2.0441,
      "step": 19000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.512487812195305e-05,
      "loss": 2.0601,
      "step": 19500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4999874996874924e-05,
      "loss": 2.0693,
      "step": 20000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4874871871796796e-05,
      "loss": 2.0679,
      "step": 20500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.474986874671867e-05,
      "loss": 2.0453,
      "step": 21000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.462486562164054e-05,
      "loss": 2.0092,
      "step": 21500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.449986249656242e-05,
      "loss": 2.0059,
      "step": 22000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.437485937148429e-05,
      "loss": 2.0032,
      "step": 22500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4249856246406165e-05,
      "loss": 2.0177,
      "step": 23000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.412485312132804e-05,
      "loss": 1.9968,
      "step": 23500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.399984999624991e-05,
      "loss": 1.9988,
      "step": 24000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.387484687117178e-05,
      "loss": 1.9992,
      "step": 24500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3749843746093654e-05,
      "loss": 1.9994,
      "step": 25000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3624840621015526e-05,
      "loss": 1.9856,
      "step": 25500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.34998374959374e-05,
      "loss": 1.9882,
      "step": 26000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.337483437085927e-05,
      "loss": 1.9656,
      "step": 26500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.324983124578115e-05,
      "loss": 1.9613,
      "step": 27000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.312482812070302e-05,
      "loss": 1.9467,
      "step": 27500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.2999824995624895e-05,
      "loss": 1.9539,
      "step": 28000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.287482187054677e-05,
      "loss": 1.9718,
      "step": 28500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.274981874546864e-05,
      "loss": 1.9498,
      "step": 29000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.262481562039051e-05,
      "loss": 1.9421,
      "step": 29500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2499812495312384e-05,
      "loss": 1.9319,
      "step": 30000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2374809370234257e-05,
      "loss": 1.9388,
      "step": 30500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.224980624515613e-05,
      "loss": 1.905,
      "step": 31000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2124803120078e-05,
      "loss": 1.9323,
      "step": 31500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.199979999499988e-05,
      "loss": 1.9367,
      "step": 32000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.187479686992175e-05,
      "loss": 1.9217,
      "step": 32500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1749793744843625e-05,
      "loss": 1.9409,
      "step": 33000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.16247906197655e-05,
      "loss": 1.8921,
      "step": 33500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.149978749468737e-05,
      "loss": 1.9078,
      "step": 34000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.137478436960924e-05,
      "loss": 1.8943,
      "step": 34500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1249781244531115e-05,
      "loss": 1.8869,
      "step": 35000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.112477811945299e-05,
      "loss": 1.8941,
      "step": 35500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.099977499437486e-05,
      "loss": 1.8851,
      "step": 36000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.087477186929674e-05,
      "loss": 1.8956,
      "step": 36500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.074976874421861e-05,
      "loss": 1.8973,
      "step": 37000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.062476561914048e-05,
      "loss": 1.8878,
      "step": 37500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0499762494062355e-05,
      "loss": 1.8487,
      "step": 38000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.037475936898423e-05,
      "loss": 1.8884,
      "step": 38500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.02497562439061e-05,
      "loss": 1.8676,
      "step": 39000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.012475311882797e-05,
      "loss": 1.8499,
      "step": 39500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9999749993749845e-05,
      "loss": 1.8559,
      "step": 40000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.987474686867172e-05,
      "loss": 1.863,
      "step": 40500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.974974374359359e-05,
      "loss": 1.86,
      "step": 41000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.962474061851547e-05,
      "loss": 1.8411,
      "step": 41500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.949973749343734e-05,
      "loss": 1.8469,
      "step": 42000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9374734368359213e-05,
      "loss": 1.8404,
      "step": 42500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.9249731243281086e-05,
      "loss": 1.855,
      "step": 43000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.912472811820296e-05,
      "loss": 1.8326,
      "step": 43500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.899972499312483e-05,
      "loss": 1.8078,
      "step": 44000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.88747218680467e-05,
      "loss": 1.8437,
      "step": 44500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8749718742968575e-05,
      "loss": 1.8437,
      "step": 45000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.862471561789045e-05,
      "loss": 1.8297,
      "step": 45500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.849971249281232e-05,
      "loss": 1.8333,
      "step": 46000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.83747093677342e-05,
      "loss": 1.835,
      "step": 46500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.824970624265607e-05,
      "loss": 1.8329,
      "step": 47000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8124703117577944e-05,
      "loss": 1.8214,
      "step": 47500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.7999699992499816e-05,
      "loss": 1.8136,
      "step": 48000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.787469686742169e-05,
      "loss": 1.8138,
      "step": 48500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.774969374234356e-05,
      "loss": 1.7895,
      "step": 49000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.762469061726543e-05,
      "loss": 1.8076,
      "step": 49500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7499687492187305e-05,
      "loss": 1.7935,
      "step": 50000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.737468436710918e-05,
      "loss": 1.8139,
      "step": 50500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.724968124203106e-05,
      "loss": 1.7982,
      "step": 51000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.712467811695293e-05,
      "loss": 1.8233,
      "step": 51500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.69996749918748e-05,
      "loss": 1.7931,
      "step": 52000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6874671866796674e-05,
      "loss": 1.7879,
      "step": 52500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6749668741718546e-05,
      "loss": 1.7863,
      "step": 53000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.662466561664042e-05,
      "loss": 1.7788,
      "step": 53500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.649966249156229e-05,
      "loss": 1.7713,
      "step": 54000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6374659366484163e-05,
      "loss": 1.7693,
      "step": 54500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6249656241406036e-05,
      "loss": 1.7801,
      "step": 55000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.612465311632791e-05,
      "loss": 1.7827,
      "step": 55500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.599964999124979e-05,
      "loss": 1.7424,
      "step": 56000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.587464686617166e-05,
      "loss": 1.783,
      "step": 56500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.574964374109353e-05,
      "loss": 1.7444,
      "step": 57000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5624640616015404e-05,
      "loss": 1.7505,
      "step": 57500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.549963749093728e-05,
      "loss": 1.7765,
      "step": 58000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.537463436585915e-05,
      "loss": 1.7915,
      "step": 58500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.524963124078102e-05,
      "loss": 1.7407,
      "step": 59000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5124628115702894e-05,
      "loss": 1.7584,
      "step": 59500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.4999624990624766e-05,
      "loss": 1.756,
      "step": 60000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.487462186554664e-05,
      "loss": 1.7643,
      "step": 60500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.474961874046852e-05,
      "loss": 1.7417,
      "step": 61000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.462461561539039e-05,
      "loss": 1.7739,
      "step": 61500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.449961249031226e-05,
      "loss": 1.74,
      "step": 62000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4374609365234135e-05,
      "loss": 1.7517,
      "step": 62500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.424960624015601e-05,
      "loss": 1.7384,
      "step": 63000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.412460311507788e-05,
      "loss": 1.7276,
      "step": 63500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.399959998999975e-05,
      "loss": 1.7359,
      "step": 64000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3874596864921624e-05,
      "loss": 1.7279,
      "step": 64500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3749593739843496e-05,
      "loss": 1.7402,
      "step": 65000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3624590614765376e-05,
      "loss": 1.7468,
      "step": 65500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.349958748968725e-05,
      "loss": 1.7205,
      "step": 66000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.337458436460912e-05,
      "loss": 1.7191,
      "step": 66500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.324958123953099e-05,
      "loss": 1.6651,
      "step": 67000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3124578114452865e-05,
      "loss": 1.5845,
      "step": 67500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.299957498937474e-05,
      "loss": 1.602,
      "step": 68000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.287457186429661e-05,
      "loss": 1.6055,
      "step": 68500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.274956873921848e-05,
      "loss": 1.5997,
      "step": 69000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2624565614140354e-05,
      "loss": 1.6062,
      "step": 69500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.249956248906223e-05,
      "loss": 1.6117,
      "step": 70000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2374559363984106e-05,
      "loss": 1.6268,
      "step": 70500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.224955623890598e-05,
      "loss": 1.601,
      "step": 71000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.212455311382785e-05,
      "loss": 1.5859,
      "step": 71500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.199954998874972e-05,
      "loss": 1.6313,
      "step": 72000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1874546863671595e-05,
      "loss": 1.595,
      "step": 72500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.174954373859347e-05,
      "loss": 1.6144,
      "step": 73000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.162454061351534e-05,
      "loss": 1.5991,
      "step": 73500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.149953748843721e-05,
      "loss": 1.5637,
      "step": 74000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1374534363359085e-05,
      "loss": 1.5957,
      "step": 74500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.124953123828096e-05,
      "loss": 1.588,
      "step": 75000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1124528113202836e-05,
      "loss": 1.5894,
      "step": 75500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.099952498812471e-05,
      "loss": 1.5897,
      "step": 76000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.087452186304658e-05,
      "loss": 1.5835,
      "step": 76500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.074951873796845e-05,
      "loss": 1.6129,
      "step": 77000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0624515612890326e-05,
      "loss": 1.5726,
      "step": 77500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0499512487812198e-05,
      "loss": 1.5672,
      "step": 78000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.037450936273407e-05,
      "loss": 1.599,
      "step": 78500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0249506237655946e-05,
      "loss": 1.5861,
      "step": 79000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.012450311257782e-05,
      "loss": 1.5977,
      "step": 79500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.999949998749969e-05,
      "loss": 1.5801,
      "step": 80000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9874496862421563e-05,
      "loss": 1.5989,
      "step": 80500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9749493737343435e-05,
      "loss": 1.5762,
      "step": 81000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.962449061226531e-05,
      "loss": 1.5779,
      "step": 81500
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9499487487187184e-05,
      "loss": 1.5988,
      "step": 82000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9374484362109056e-05,
      "loss": 1.5652,
      "step": 82500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9249481237030928e-05,
      "loss": 1.5817,
      "step": 83000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.91244781119528e-05,
      "loss": 1.535,
      "step": 83500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8999474986874676e-05,
      "loss": 1.5596,
      "step": 84000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.887447186179655e-05,
      "loss": 1.5719,
      "step": 84500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.874946873671842e-05,
      "loss": 1.562,
      "step": 85000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8624465611640293e-05,
      "loss": 1.5764,
      "step": 85500
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8499462486562166e-05,
      "loss": 1.5751,
      "step": 86000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.837445936148404e-05,
      "loss": 1.5826,
      "step": 86500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8249456236405914e-05,
      "loss": 1.5793,
      "step": 87000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8124453111327786e-05,
      "loss": 1.5526,
      "step": 87500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.799944998624966e-05,
      "loss": 1.5444,
      "step": 88000
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7874446861171534e-05,
      "loss": 1.5663,
      "step": 88500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7749443736093407e-05,
      "loss": 1.5701,
      "step": 89000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.762444061101528e-05,
      "loss": 1.58,
      "step": 89500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.749943748593715e-05,
      "loss": 1.531,
      "step": 90000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7374434360859024e-05,
      "loss": 1.5674,
      "step": 90500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.72494312357809e-05,
      "loss": 1.5475,
      "step": 91000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7124428110702772e-05,
      "loss": 1.5701,
      "step": 91500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.6999424985624644e-05,
      "loss": 1.5722,
      "step": 92000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6874421860546516e-05,
      "loss": 1.5633,
      "step": 92500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.674941873546839e-05,
      "loss": 1.5682,
      "step": 93000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6624415610390265e-05,
      "loss": 1.5839,
      "step": 93500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6499412485312137e-05,
      "loss": 1.5587,
      "step": 94000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.637440936023401e-05,
      "loss": 1.5585,
      "step": 94500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.624940623515588e-05,
      "loss": 1.5573,
      "step": 95000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6124403110077754e-05,
      "loss": 1.5558,
      "step": 95500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.599939998499963e-05,
      "loss": 1.5818,
      "step": 96000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5874396859921502e-05,
      "loss": 1.5416,
      "step": 96500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5749393734843374e-05,
      "loss": 1.5566,
      "step": 97000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5624390609765247e-05,
      "loss": 1.573,
      "step": 97500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.549938748468712e-05,
      "loss": 1.5625,
      "step": 98000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5374384359608995e-05,
      "loss": 1.5688,
      "step": 98500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5249381234530867e-05,
      "loss": 1.5468,
      "step": 99000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.512437810945274e-05,
      "loss": 1.5511,
      "step": 99500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.499937498437461e-05,
      "loss": 1.5325,
      "step": 100000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4874371859296484e-05,
      "loss": 1.5505,
      "step": 100500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4749368734218357e-05,
      "loss": 1.535,
      "step": 101000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.462436560914023e-05,
      "loss": 1.5471,
      "step": 101500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.44993624840621e-05,
      "loss": 1.5607,
      "step": 102000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4374359358983974e-05,
      "loss": 1.5426,
      "step": 102500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.424935623390585e-05,
      "loss": 1.5488,
      "step": 103000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4124353108827722e-05,
      "loss": 1.5321,
      "step": 103500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.3999349983749594e-05,
      "loss": 1.528,
      "step": 104000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3874346858671467e-05,
      "loss": 1.5416,
      "step": 104500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.374934373359334e-05,
      "loss": 1.5228,
      "step": 105000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3624340608515215e-05,
      "loss": 1.5114,
      "step": 105500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3499337483437087e-05,
      "loss": 1.502,
      "step": 106000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.337433435835896e-05,
      "loss": 1.5264,
      "step": 106500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.324933123328083e-05,
      "loss": 1.5065,
      "step": 107000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3124328108202707e-05,
      "loss": 1.525,
      "step": 107500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.299932498312458e-05,
      "loss": 1.5159,
      "step": 108000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2874321858046452e-05,
      "loss": 1.521,
      "step": 108500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2749318732968324e-05,
      "loss": 1.5116,
      "step": 109000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2624315607890197e-05,
      "loss": 1.5392,
      "step": 109500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2499312482812073e-05,
      "loss": 1.5245,
      "step": 110000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2374309357733945e-05,
      "loss": 1.5392,
      "step": 110500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2249306232655817e-05,
      "loss": 1.5114,
      "step": 111000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.212430310757769e-05,
      "loss": 1.55,
      "step": 111500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1999299982499562e-05,
      "loss": 1.487,
      "step": 112000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1874296857421438e-05,
      "loss": 1.5393,
      "step": 112500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.174929373234331e-05,
      "loss": 1.529,
      "step": 113000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1624290607265182e-05,
      "loss": 1.5167,
      "step": 113500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1499287482187055e-05,
      "loss": 1.5101,
      "step": 114000
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1374284357108927e-05,
      "loss": 1.5113,
      "step": 114500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1249281232030803e-05,
      "loss": 1.5353,
      "step": 115000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1124278106952675e-05,
      "loss": 1.4933,
      "step": 115500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0999274981874548e-05,
      "loss": 1.5104,
      "step": 116000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.087427185679642e-05,
      "loss": 1.4918,
      "step": 116500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0749268731718292e-05,
      "loss": 1.518,
      "step": 117000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0624265606640168e-05,
      "loss": 1.4962,
      "step": 117500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.049926248156204e-05,
      "loss": 1.5395,
      "step": 118000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0374259356483913e-05,
      "loss": 1.5027,
      "step": 118500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0249256231405785e-05,
      "loss": 1.5285,
      "step": 119000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0124253106327657e-05,
      "loss": 1.5141,
      "step": 119500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9999249981249533e-05,
      "loss": 1.4661,
      "step": 120000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9874246856171406e-05,
      "loss": 1.5139,
      "step": 120500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9749243731093278e-05,
      "loss": 1.4976,
      "step": 121000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.962424060601515e-05,
      "loss": 1.4833,
      "step": 121500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9499237480937026e-05,
      "loss": 1.4846,
      "step": 122000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.93742343558589e-05,
      "loss": 1.4874,
      "step": 122500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.924923123078077e-05,
      "loss": 1.4739,
      "step": 123000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9124228105702643e-05,
      "loss": 1.502,
      "step": 123500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8999224980624515e-05,
      "loss": 1.4991,
      "step": 124000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.887422185554639e-05,
      "loss": 1.4695,
      "step": 124500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8749218730468263e-05,
      "loss": 1.5081,
      "step": 125000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8624215605390136e-05,
      "loss": 1.4977,
      "step": 125500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8499212480312008e-05,
      "loss": 1.4885,
      "step": 126000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.837420935523388e-05,
      "loss": 1.4812,
      "step": 126500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8249206230155756e-05,
      "loss": 1.4861,
      "step": 127000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.812420310507763e-05,
      "loss": 1.4895,
      "step": 127500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.79991999799995e-05,
      "loss": 1.4823,
      "step": 128000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7874196854921373e-05,
      "loss": 1.4821,
      "step": 128500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7749193729843246e-05,
      "loss": 1.5037,
      "step": 129000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.762419060476512e-05,
      "loss": 1.4783,
      "step": 129500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7499187479686994e-05,
      "loss": 1.5003,
      "step": 130000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7374184354608866e-05,
      "loss": 1.4846,
      "step": 130500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.724918122953074e-05,
      "loss": 1.4976,
      "step": 131000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.712417810445261e-05,
      "loss": 1.4989,
      "step": 131500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6999174979374487e-05,
      "loss": 1.4746,
      "step": 132000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.687417185429636e-05,
      "loss": 1.4871,
      "step": 132500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.674916872921823e-05,
      "loss": 1.494,
      "step": 133000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6624165604140104e-05,
      "loss": 1.4163,
      "step": 133500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6499162479061976e-05,
      "loss": 1.3369,
      "step": 134000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6374159353983852e-05,
      "loss": 1.3468,
      "step": 134500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6249156228905724e-05,
      "loss": 1.3477,
      "step": 135000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6124153103827596e-05,
      "loss": 1.3267,
      "step": 135500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.599914997874947e-05,
      "loss": 1.3454,
      "step": 136000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.587414685367134e-05,
      "loss": 1.3346,
      "step": 136500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5749143728593217e-05,
      "loss": 1.3609,
      "step": 137000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.562414060351509e-05,
      "loss": 1.3203,
      "step": 137500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.549913747843696e-05,
      "loss": 1.3446,
      "step": 138000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5374134353358834e-05,
      "loss": 1.3297,
      "step": 138500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5249131228280708e-05,
      "loss": 1.3478,
      "step": 139000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.512412810320258e-05,
      "loss": 1.3475,
      "step": 139500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4999124978124454e-05,
      "loss": 1.2979,
      "step": 140000
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4874121853046327e-05,
      "loss": 1.3368,
      "step": 140500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.47491187279682e-05,
      "loss": 1.3378,
      "step": 141000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4624115602890073e-05,
      "loss": 1.3591,
      "step": 141500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4499112477811947e-05,
      "loss": 1.3627,
      "step": 142000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.437410935273382e-05,
      "loss": 1.3245,
      "step": 142500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4249106227655692e-05,
      "loss": 1.3322,
      "step": 143000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4124103102577566e-05,
      "loss": 1.3387,
      "step": 143500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3999099977499438e-05,
      "loss": 1.3613,
      "step": 144000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3874096852421312e-05,
      "loss": 1.3205,
      "step": 144500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3749093727343185e-05,
      "loss": 1.3701,
      "step": 145000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3624090602265057e-05,
      "loss": 1.3483,
      "step": 145500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3499087477186931e-05,
      "loss": 1.3294,
      "step": 146000
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3374084352108803e-05,
      "loss": 1.3255,
      "step": 146500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3249081227030678e-05,
      "loss": 1.3477,
      "step": 147000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.312407810195255e-05,
      "loss": 1.3608,
      "step": 147500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2999074976874422e-05,
      "loss": 1.3364,
      "step": 148000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2874071851796296e-05,
      "loss": 1.3534,
      "step": 148500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2749068726718169e-05,
      "loss": 1.347,
      "step": 149000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2624065601640043e-05,
      "loss": 1.3482,
      "step": 149500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2499062476561915e-05,
      "loss": 1.356,
      "step": 150000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2374059351483789e-05,
      "loss": 1.336,
      "step": 150500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2249056226405661e-05,
      "loss": 1.3676,
      "step": 151000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2124053101327534e-05,
      "loss": 1.3444,
      "step": 151500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1999049976249408e-05,
      "loss": 1.3574,
      "step": 152000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.187404685117128e-05,
      "loss": 1.3093,
      "step": 152500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1749043726093154e-05,
      "loss": 1.3318,
      "step": 153000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1624040601015027e-05,
      "loss": 1.3291,
      "step": 153500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1499037475936899e-05,
      "loss": 1.3255,
      "step": 154000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1374034350858773e-05,
      "loss": 1.3142,
      "step": 154500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1249031225780645e-05,
      "loss": 1.3293,
      "step": 155000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.112402810070252e-05,
      "loss": 1.342,
      "step": 155500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0999024975624392e-05,
      "loss": 1.308,
      "step": 156000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0874021850546264e-05,
      "loss": 1.3136,
      "step": 156500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0749018725468136e-05,
      "loss": 1.3274,
      "step": 157000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0624015600390009e-05,
      "loss": 1.3306,
      "step": 157500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0499012475311883e-05,
      "loss": 1.3347,
      "step": 158000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0374009350233755e-05,
      "loss": 1.3135,
      "step": 158500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.024900622515563e-05,
      "loss": 1.3239,
      "step": 159000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0124003100077502e-05,
      "loss": 1.3085,
      "step": 159500
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.998999974999376e-06,
      "loss": 1.3323,
      "step": 160000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.873996849921248e-06,
      "loss": 1.3372,
      "step": 160500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.74899372484312e-06,
      "loss": 1.317,
      "step": 161000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.623990599764994e-06,
      "loss": 1.3273,
      "step": 161500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.498987474686867e-06,
      "loss": 1.3139,
      "step": 162000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.37398434960874e-06,
      "loss": 1.3027,
      "step": 162500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.248981224530613e-06,
      "loss": 1.3,
      "step": 163000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.123978099452486e-06,
      "loss": 1.3465,
      "step": 163500
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.99897497437436e-06,
      "loss": 1.3039,
      "step": 164000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.873971849296232e-06,
      "loss": 1.3188,
      "step": 164500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.748968724218106e-06,
      "loss": 1.3457,
      "step": 165000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.623965599139978e-06,
      "loss": 1.3377,
      "step": 165500
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.49896247406185e-06,
      "loss": 1.3137,
      "step": 166000
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.373959348983725e-06,
      "loss": 1.3265,
      "step": 166500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.248956223905597e-06,
      "loss": 1.3429,
      "step": 167000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.123953098827471e-06,
      "loss": 1.3233,
      "step": 167500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.998949973749343e-06,
      "loss": 1.2967,
      "step": 168000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.873946848671218e-06,
      "loss": 1.3232,
      "step": 168500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.74894372359309e-06,
      "loss": 1.2925,
      "step": 169000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.623940598514963e-06,
      "loss": 1.3128,
      "step": 169500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.498937473436836e-06,
      "loss": 1.3273,
      "step": 170000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.373934348358709e-06,
      "loss": 1.3101,
      "step": 170500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.248931223280582e-06,
      "loss": 1.3044,
      "step": 171000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.123928098202455e-06,
      "loss": 1.2993,
      "step": 171500
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.998924973124328e-06,
      "loss": 1.3313,
      "step": 172000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.873921848046201e-06,
      "loss": 1.3018,
      "step": 172500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.748918722968075e-06,
      "loss": 1.3022,
      "step": 173000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.623915597889947e-06,
      "loss": 1.2963,
      "step": 173500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.49891247281182e-06,
      "loss": 1.3185,
      "step": 174000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.373909347733693e-06,
      "loss": 1.3228,
      "step": 174500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.2489062226555666e-06,
      "loss": 1.3196,
      "step": 175000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.12390309757744e-06,
      "loss": 1.3052,
      "step": 175500
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.998899972499312e-06,
      "loss": 1.2999,
      "step": 176000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.873896847421185e-06,
      "loss": 1.3266,
      "step": 176500
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.7488937223430585e-06,
      "loss": 1.3008,
      "step": 177000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.623890597264932e-06,
      "loss": 1.316,
      "step": 177500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.498887472186805e-06,
      "loss": 1.3147,
      "step": 178000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.373884347108678e-06,
      "loss": 1.3141,
      "step": 178500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.2488812220305505e-06,
      "loss": 1.3112,
      "step": 179000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.123878096952424e-06,
      "loss": 1.3116,
      "step": 179500
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.998874971874297e-06,
      "loss": 1.3018,
      "step": 180000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.87387184679617e-06,
      "loss": 1.3224,
      "step": 180500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.748868721718043e-06,
      "loss": 1.2892,
      "step": 181000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.6238655966399165e-06,
      "loss": 1.2784,
      "step": 181500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.498862471561789e-06,
      "loss": 1.3124,
      "step": 182000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.373859346483662e-06,
      "loss": 1.3137,
      "step": 182500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.248856221405535e-06,
      "loss": 1.3028,
      "step": 183000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.1238530963274084e-06,
      "loss": 1.3232,
      "step": 183500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.998849971249282e-06,
      "loss": 1.2967,
      "step": 184000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.873846846171154e-06,
      "loss": 1.285,
      "step": 184500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.7488437210930276e-06,
      "loss": 1.2919,
      "step": 185000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6238405960149004e-06,
      "loss": 1.3022,
      "step": 185500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4988374709367736e-06,
      "loss": 1.2914,
      "step": 186000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.373834345858647e-06,
      "loss": 1.3033,
      "step": 186500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.2488312207805196e-06,
      "loss": 1.2823,
      "step": 187000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1238280957023928e-06,
      "loss": 1.3115,
      "step": 187500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.998824970624266e-06,
      "loss": 1.2941,
      "step": 188000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.8738218455461387e-06,
      "loss": 1.2785,
      "step": 188500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.748818720468012e-06,
      "loss": 1.2913,
      "step": 189000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.6238155953898847e-06,
      "loss": 1.3162,
      "step": 189500
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.498812470311758e-06,
      "loss": 1.3114,
      "step": 190000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.373809345233631e-06,
      "loss": 1.2898,
      "step": 190500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.248806220155504e-06,
      "loss": 1.313,
      "step": 191000
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.123803095077377e-06,
      "loss": 1.2896,
      "step": 191500
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.9987999699992503e-06,
      "loss": 1.3174,
      "step": 192000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8737968449211233e-06,
      "loss": 1.2951,
      "step": 192500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.7487937198429963e-06,
      "loss": 1.3192,
      "step": 193000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6237905947648693e-06,
      "loss": 1.3053,
      "step": 193500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4987874696867423e-06,
      "loss": 1.3045,
      "step": 194000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3737843446086152e-06,
      "loss": 1.2992,
      "step": 194500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2487812195304882e-06,
      "loss": 1.2643,
      "step": 195000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1237780944523614e-06,
      "loss": 1.2917,
      "step": 195500
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.987749693742344e-07,
      "loss": 1.294,
      "step": 196000
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.737718442961075e-07,
      "loss": 1.289,
      "step": 196500
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.487687192179805e-07,
      "loss": 1.2809,
      "step": 197000
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.237655941398535e-07,
      "loss": 1.3175,
      "step": 197500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.987624690617266e-07,
      "loss": 1.3111,
      "step": 198000
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.7375934398359957e-07,
      "loss": 1.3019,
      "step": 198500
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.4875621890547267e-07,
      "loss": 1.3065,
      "step": 199000
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.237530938273457e-07,
      "loss": 1.2757,
      "step": 199500
    },
    {
      "epoch": 3.0,
      "step": 199995,
      "total_flos": 3.01955982127104e+17,
      "train_loss": 1.6198288996531554,
      "train_runtime": 56046.7089,
      "train_samples_per_second": 35.683,
      "train_steps_per_second": 3.568
    }
  ],
  "max_steps": 199995,
  "num_train_epochs": 3,
  "total_flos": 3.01955982127104e+17,
  "trial_name": null,
  "trial_params": null
}