|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.997069903725408, |
|
"global_step": 179000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.986214594670016e-05, |
|
"loss": 7.1794, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9722617552671966e-05, |
|
"loss": 5.0163, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.958308915864379e-05, |
|
"loss": 4.3395, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9443560764615604e-05, |
|
"loss": 3.8052, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.930403237058742e-05, |
|
"loss": 3.2737, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9164503976559235e-05, |
|
"loss": 2.7787, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9024975582531044e-05, |
|
"loss": 2.3662, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.8885447188502866e-05, |
|
"loss": 2.0976, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8745918794474674e-05, |
|
"loss": 1.842, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8606390400446497e-05, |
|
"loss": 1.5974, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8466862006418305e-05, |
|
"loss": 1.4216, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.832761266917818e-05, |
|
"loss": 1.3031, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8188084275149996e-05, |
|
"loss": 1.2708, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.804855588112181e-05, |
|
"loss": 1.1819, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.790902748709363e-05, |
|
"loss": 1.0589, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.776949909306544e-05, |
|
"loss": 1.0362, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.762997069903726e-05, |
|
"loss": 1.052, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7490442305009066e-05, |
|
"loss": 1.0126, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.735119296776894e-05, |
|
"loss": 1.0532, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.721166457374076e-05, |
|
"loss": 0.9005, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.707213617971257e-05, |
|
"loss": 0.9527, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.693260778568439e-05, |
|
"loss": 0.9698, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.6793079391656204e-05, |
|
"loss": 0.9191, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.665355099762802e-05, |
|
"loss": 0.8907, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.6514022603599834e-05, |
|
"loss": 0.8916, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.637449420957165e-05, |
|
"loss": 0.7784, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.6234965815543465e-05, |
|
"loss": 0.8473, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.6095716478303334e-05, |
|
"loss": 0.8101, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.5956188084275156e-05, |
|
"loss": 0.7118, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5816659690246965e-05, |
|
"loss": 0.7726, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.567713129621878e-05, |
|
"loss": 0.7623, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.5537602902190596e-05, |
|
"loss": 0.7523, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.539807450816241e-05, |
|
"loss": 0.7224, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5258546114134227e-05, |
|
"loss": 0.7414, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.511901772010604e-05, |
|
"loss": 0.7231, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.497976838286592e-05, |
|
"loss": 0.6842, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.4840519045625786e-05, |
|
"loss": 0.6876, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.47009906515976e-05, |
|
"loss": 0.6741, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.456146225756942e-05, |
|
"loss": 0.6214, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.442193386354123e-05, |
|
"loss": 0.684, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.428240546951305e-05, |
|
"loss": 0.6188, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.4143156132272917e-05, |
|
"loss": 0.6574, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.400362773824473e-05, |
|
"loss": 0.6114, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3864099344216554e-05, |
|
"loss": 0.6181, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.372457095018836e-05, |
|
"loss": 0.6682, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3585042556160185e-05, |
|
"loss": 0.6209, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3445514162131994e-05, |
|
"loss": 0.6118, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.3305985768103816e-05, |
|
"loss": 0.6713, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.3166457374075625e-05, |
|
"loss": 0.5884, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.302692898004744e-05, |
|
"loss": 0.6232, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.2887400586019255e-05, |
|
"loss": 0.5593, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.274815124877913e-05, |
|
"loss": 0.5509, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.2608901911539e-05, |
|
"loss": 0.5983, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.2469373517510815e-05, |
|
"loss": 0.5948, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.232984512348263e-05, |
|
"loss": 0.5583, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2190316729454446e-05, |
|
"loss": 0.5039, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.205078833542626e-05, |
|
"loss": 0.6184, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.191125994139808e-05, |
|
"loss": 0.5377, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.1772010604157945e-05, |
|
"loss": 0.558, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.163248221012976e-05, |
|
"loss": 0.5396, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1492953816101576e-05, |
|
"loss": 0.5175, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.135342542207339e-05, |
|
"loss": 0.5602, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.1213897028045214e-05, |
|
"loss": 0.5414, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.107436863401702e-05, |
|
"loss": 0.5585, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.0934840239988845e-05, |
|
"loss": 0.5411, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.079531184596065e-05, |
|
"loss": 0.4735, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.065578345193247e-05, |
|
"loss": 0.5214, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0516255057904284e-05, |
|
"loss": 0.5409, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.037700572066416e-05, |
|
"loss": 0.4915, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0237477326635975e-05, |
|
"loss": 0.5052, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.009794893260779e-05, |
|
"loss": 0.5133, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9958420538579606e-05, |
|
"loss": 0.4739, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9818892144551415e-05, |
|
"loss": 0.5096, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.967964280731129e-05, |
|
"loss": 0.4766, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9540114413283105e-05, |
|
"loss": 0.476, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.940058601925492e-05, |
|
"loss": 0.526, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.9261057625226736e-05, |
|
"loss": 0.4442, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9121808287986605e-05, |
|
"loss": 0.4361, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.898227989395842e-05, |
|
"loss": 0.4481, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8842751499930236e-05, |
|
"loss": 0.4919, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.870322310590205e-05, |
|
"loss": 0.4805, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.856369471187387e-05, |
|
"loss": 0.4876, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.842416631784568e-05, |
|
"loss": 0.4532, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.828491698060556e-05, |
|
"loss": 0.3834, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8145388586577366e-05, |
|
"loss": 0.4179, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.800586019254919e-05, |
|
"loss": 0.4828, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7866331798521e-05, |
|
"loss": 0.4436, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.772680340449282e-05, |
|
"loss": 0.464, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.758727501046463e-05, |
|
"loss": 0.4102, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7448025673224503e-05, |
|
"loss": 0.4299, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.730849727919632e-05, |
|
"loss": 0.4379, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7168968885168134e-05, |
|
"loss": 0.4455, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.702944049113995e-05, |
|
"loss": 0.4051, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6889912097111765e-05, |
|
"loss": 0.4395, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.675038370308358e-05, |
|
"loss": 0.4331, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.661085530905539e-05, |
|
"loss": 0.4652, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6471605971815265e-05, |
|
"loss": 0.4522, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.633207757778708e-05, |
|
"loss": 0.4568, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6192549183758895e-05, |
|
"loss": 0.4091, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.605302078973071e-05, |
|
"loss": 0.4307, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5913771452490586e-05, |
|
"loss": 0.3586, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5774243058462395e-05, |
|
"loss": 0.392, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.563499372122227e-05, |
|
"loss": 0.426, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5495465327194086e-05, |
|
"loss": 0.4123, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.53559369331659e-05, |
|
"loss": 0.4027, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.521640853913772e-05, |
|
"loss": 0.3684, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.507688014510953e-05, |
|
"loss": 0.3763, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.493735175108135e-05, |
|
"loss": 0.3719, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.479782335705316e-05, |
|
"loss": 0.4195, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.465829496302498e-05, |
|
"loss": 0.3923, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4518766568996794e-05, |
|
"loss": 0.4092, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.437923817496861e-05, |
|
"loss": 0.397, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.423970978094042e-05, |
|
"loss": 0.3882, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.410018138691224e-05, |
|
"loss": 0.4099, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.396121110646017e-05, |
|
"loss": 0.4156, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.382196176922004e-05, |
|
"loss": 0.3338, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.368243337519185e-05, |
|
"loss": 0.3582, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.354290498116367e-05, |
|
"loss": 0.3682, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3403376587135484e-05, |
|
"loss": 0.3707, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_bleu": 36.3356, |
|
"eval_gen_len": 17.9313, |
|
"eval_loss": 4.4314866065979, |
|
"eval_runtime": 2285.8028, |
|
"eval_samples_per_second": 10.451, |
|
"eval_steps_per_second": 2.613, |
|
"step": 59725 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.32638481931073e-05, |
|
"loss": 4.5133, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3124319799079115e-05, |
|
"loss": 3.7997, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.298479140505093e-05, |
|
"loss": 3.0224, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.284526301102274e-05, |
|
"loss": 2.484, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2706013673782614e-05, |
|
"loss": 2.0495, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.256648527975443e-05, |
|
"loss": 1.6761, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2426956885726245e-05, |
|
"loss": 1.3831, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.228742849169806e-05, |
|
"loss": 1.2769, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2147900097669876e-05, |
|
"loss": 1.1351, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.200865076042975e-05, |
|
"loss": 0.9208, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.186912236640157e-05, |
|
"loss": 0.9154, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.1729593972373376e-05, |
|
"loss": 0.8575, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.15900655783452e-05, |
|
"loss": 0.8328, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.1450537184317006e-05, |
|
"loss": 0.7484, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.131100879028883e-05, |
|
"loss": 0.7488, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.11717594530487e-05, |
|
"loss": 0.6775, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.103223105902051e-05, |
|
"loss": 0.6732, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.089270266499233e-05, |
|
"loss": 0.6758, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.0753174270964144e-05, |
|
"loss": 0.7088, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.061364587693596e-05, |
|
"loss": 0.6438, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.047411748290777e-05, |
|
"loss": 0.712, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0334868145667643e-05, |
|
"loss": 0.5968, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0195339751639462e-05, |
|
"loss": 0.6002, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.0055811357611274e-05, |
|
"loss": 0.6417, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9916282963583093e-05, |
|
"loss": 0.6135, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9776754569554905e-05, |
|
"loss": 0.5759, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9637226175526724e-05, |
|
"loss": 0.5827, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9497697781498536e-05, |
|
"loss": 0.6051, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.935816938747035e-05, |
|
"loss": 0.5689, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9218920050230227e-05, |
|
"loss": 0.5623, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.907939165620204e-05, |
|
"loss": 0.5651, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.8939863262173854e-05, |
|
"loss": 0.54, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8800334868145666e-05, |
|
"loss": 0.5728, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8661364587693595e-05, |
|
"loss": 0.6477, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.8521836193665414e-05, |
|
"loss": 0.4917, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8382307799637226e-05, |
|
"loss": 0.5212, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8242779405609044e-05, |
|
"loss": 0.4962, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8103251011580856e-05, |
|
"loss": 0.5088, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7963722617552672e-05, |
|
"loss": 0.5211, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.782419422352449e-05, |
|
"loss": 0.4543, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.7684665829496303e-05, |
|
"loss": 0.5363, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.754513743546812e-05, |
|
"loss": 0.4277, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.740588809822799e-05, |
|
"loss": 0.5252, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7266359704199806e-05, |
|
"loss": 0.4449, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7126831310171618e-05, |
|
"loss": 0.462, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.6987581972931493e-05, |
|
"loss": 0.5038, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.684805357890331e-05, |
|
"loss": 0.4547, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.670852518487512e-05, |
|
"loss": 0.4703, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.656899679084694e-05, |
|
"loss": 0.4694, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.642946839681875e-05, |
|
"loss": 0.4339, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.628994000279057e-05, |
|
"loss": 0.4804, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6150411608762382e-05, |
|
"loss": 0.4237, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.60108832147342e-05, |
|
"loss": 0.4185, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5871633877494073e-05, |
|
"loss": 0.4328, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5732105483465885e-05, |
|
"loss": 0.4755, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5592577089437704e-05, |
|
"loss": 0.4228, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5453048695409516e-05, |
|
"loss": 0.4356, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.531352030138133e-05, |
|
"loss": 0.4154, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5174270964141207e-05, |
|
"loss": 0.4188, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.503474257011302e-05, |
|
"loss": 0.3696, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4895214176084834e-05, |
|
"loss": 0.3678, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.475568578205665e-05, |
|
"loss": 0.4446, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4616157388028465e-05, |
|
"loss": 0.4, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4476908050788337e-05, |
|
"loss": 0.3944, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.433737965676015e-05, |
|
"loss": 0.4156, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4197851262731965e-05, |
|
"loss": 0.3926, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4058322868703784e-05, |
|
"loss": 0.4252, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.39187944746756e-05, |
|
"loss": 0.338, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3779266080647414e-05, |
|
"loss": 0.3658, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.363973768661923e-05, |
|
"loss": 0.3664, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3500209292591045e-05, |
|
"loss": 0.4033, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3360680898562857e-05, |
|
"loss": 0.4257, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.322143156132273e-05, |
|
"loss": 0.3724, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.30821822240826e-05, |
|
"loss": 0.3843, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2942653830054417e-05, |
|
"loss": 0.3394, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2803125436026232e-05, |
|
"loss": 0.3969, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2663597041998048e-05, |
|
"loss": 0.3733, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2524068647969863e-05, |
|
"loss": 0.3735, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.238454025394168e-05, |
|
"loss": 0.2917, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.224529091670155e-05, |
|
"loss": 0.3786, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2106041579461423e-05, |
|
"loss": 0.377, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.1966513185433238e-05, |
|
"loss": 0.3519, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.1826984791405054e-05, |
|
"loss": 0.3491, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.168745639737687e-05, |
|
"loss": 0.3668, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1547928003348685e-05, |
|
"loss": 0.3548, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1408399609320497e-05, |
|
"loss": 0.339, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1268871215292312e-05, |
|
"loss": 0.3558, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1129342821264127e-05, |
|
"loss": 0.3329, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0989814427235943e-05, |
|
"loss": 0.3354, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0850286033207758e-05, |
|
"loss": 0.3499, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0710757639179574e-05, |
|
"loss": 0.3045, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.057122924515139e-05, |
|
"loss": 0.3639, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0431700851123205e-05, |
|
"loss": 0.3274, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.029217245709502e-05, |
|
"loss": 0.3552, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0152923119854892e-05, |
|
"loss": 0.3479, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0013394725826707e-05, |
|
"loss": 0.3079, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9873866331798523e-05, |
|
"loss": 0.3119, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9734337937770335e-05, |
|
"loss": 0.3404, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9595367657318264e-05, |
|
"loss": 0.3073, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.945583926329008e-05, |
|
"loss": 0.354, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9316310869261895e-05, |
|
"loss": 0.2939, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9176782475233713e-05, |
|
"loss": 0.2612, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.903725408120553e-05, |
|
"loss": 0.2841, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8898004743965397e-05, |
|
"loss": 0.3047, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8758476349937213e-05, |
|
"loss": 0.3214, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8618947955909028e-05, |
|
"loss": 0.2973, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8479419561880844e-05, |
|
"loss": 0.286, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.833989116785266e-05, |
|
"loss": 0.3119, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.820064183061253e-05, |
|
"loss": 0.3027, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8061113436584347e-05, |
|
"loss": 0.269, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7921585042556162e-05, |
|
"loss": 0.3149, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7782056648527974e-05, |
|
"loss": 0.266, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.764252825449979e-05, |
|
"loss": 0.2511, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7502999860471605e-05, |
|
"loss": 0.2835, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7363471466443424e-05, |
|
"loss": 0.2729, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7224222129203292e-05, |
|
"loss": 0.3032, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7084693735175108e-05, |
|
"loss": 0.2998, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6945165341146923e-05, |
|
"loss": 0.2854, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.680563694711874e-05, |
|
"loss": 0.3025, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_bleu": 0.0, |
|
"eval_gen_len": 1.0, |
|
"eval_loss": 0.17790496349334717, |
|
"eval_runtime": 13498.8684, |
|
"eval_samples_per_second": 1.77, |
|
"eval_steps_per_second": 0.442, |
|
"step": 119450 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6666108553090554e-05, |
|
"loss": 0.2868, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.652658015906237e-05, |
|
"loss": 0.196, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6387051765034185e-05, |
|
"loss": 0.185, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6247802427794057e-05, |
|
"loss": 0.1974, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6108274033765873e-05, |
|
"loss": 0.2136, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5968745639737688e-05, |
|
"loss": 0.2086, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5829217245709503e-05, |
|
"loss": 0.216, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5689688851681315e-05, |
|
"loss": 0.1977, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.555043951444119e-05, |
|
"loss": 0.2043, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5410911120413006e-05, |
|
"loss": 0.1911, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.527138272638482e-05, |
|
"loss": 0.1675, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5131854332356635e-05, |
|
"loss": 0.1937, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.499232593832845e-05, |
|
"loss": 0.1846, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4852797544300265e-05, |
|
"loss": 0.1573, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.471326915027208e-05, |
|
"loss": 0.1788, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4573740756243897e-05, |
|
"loss": 0.1968, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4434212362215713e-05, |
|
"loss": 0.1916, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4294683968187528e-05, |
|
"loss": 0.201, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4155434630947398e-05, |
|
"loss": 0.1529, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4015906236919212e-05, |
|
"loss": 0.1937, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3876377842891028e-05, |
|
"loss": 0.1873, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3736849448862843e-05, |
|
"loss": 0.2018, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.359732105483466e-05, |
|
"loss": 0.165, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3457792660806476e-05, |
|
"loss": 0.1879, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3318264266778291e-05, |
|
"loss": 0.1975, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3179014929538161e-05, |
|
"loss": 0.1946, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3039765592298033e-05, |
|
"loss": 0.197, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2900237198269849e-05, |
|
"loss": 0.1875, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2760708804241664e-05, |
|
"loss": 0.1752, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.262118041021348e-05, |
|
"loss": 0.2013, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2481652016185293e-05, |
|
"loss": 0.1874, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2342123622157109e-05, |
|
"loss": 0.1816, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2202595228128926e-05, |
|
"loss": 0.1905, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.206306683410074e-05, |
|
"loss": 0.1757, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1923817496860612e-05, |
|
"loss": 0.1781, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1784289102832427e-05, |
|
"loss": 0.1525, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1644760708804243e-05, |
|
"loss": 0.1943, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1505232314776056e-05, |
|
"loss": 0.1624, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1365982977535928e-05, |
|
"loss": 0.1695, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1226454583507746e-05, |
|
"loss": 0.1371, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.108692618947956e-05, |
|
"loss": 0.1761, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0947397795451375e-05, |
|
"loss": 0.1635, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.080786940142319e-05, |
|
"loss": 0.1871, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0668341007395006e-05, |
|
"loss": 0.1883, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0528812613366821e-05, |
|
"loss": 0.143, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0389284219338635e-05, |
|
"loss": 0.1668, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0250034882098508e-05, |
|
"loss": 0.1651, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0110506488070324e-05, |
|
"loss": 0.1565, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.970978094042138e-06, |
|
"loss": 0.1569, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.831449700013953e-06, |
|
"loss": 0.1394, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.692200362773825e-06, |
|
"loss": 0.1721, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.55267196874564e-06, |
|
"loss": 0.1852, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.413143574717454e-06, |
|
"loss": 0.1423, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.273615180689271e-06, |
|
"loss": 0.1476, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.134086786661087e-06, |
|
"loss": 0.1507, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.994837449420957e-06, |
|
"loss": 0.1497, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.855309055392773e-06, |
|
"loss": 0.1698, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.715780661364588e-06, |
|
"loss": 0.1337, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.576252267336403e-06, |
|
"loss": 0.1584, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.437002930096274e-06, |
|
"loss": 0.1518, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.297474536068091e-06, |
|
"loss": 0.1411, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.158225198827963e-06, |
|
"loss": 0.1524, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.018696804799777e-06, |
|
"loss": 0.1524, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.879168410771592e-06, |
|
"loss": 0.1278, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.739640016743408e-06, |
|
"loss": 0.13, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.600111622715222e-06, |
|
"loss": 0.1278, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.460583228687038e-06, |
|
"loss": 0.1244, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.321054834658854e-06, |
|
"loss": 0.1629, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.181526440630669e-06, |
|
"loss": 0.1547, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.041998046602484e-06, |
|
"loss": 0.1604, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.902748709362355e-06, |
|
"loss": 0.1345, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.763220315334171e-06, |
|
"loss": 0.1454, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.623691921305986e-06, |
|
"loss": 0.1352, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.484163527277802e-06, |
|
"loss": 0.131, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.344635133249617e-06, |
|
"loss": 0.1378, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.205106739221432e-06, |
|
"loss": 0.1464, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.065578345193247e-06, |
|
"loss": 0.135, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.926049951165062e-06, |
|
"loss": 0.1393, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.7868006139249335e-06, |
|
"loss": 0.1269, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.647551276684806e-06, |
|
"loss": 0.1239, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.508022882656621e-06, |
|
"loss": 0.147, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.368494488628436e-06, |
|
"loss": 0.1212, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.228966094600252e-06, |
|
"loss": 0.1501, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.089716757360123e-06, |
|
"loss": 0.1348, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.9501883633319385e-06, |
|
"loss": 0.1282, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.810659969303753e-06, |
|
"loss": 0.1324, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.671131575275569e-06, |
|
"loss": 0.1255, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.531603181247384e-06, |
|
"loss": 0.1458, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.392074787219199e-06, |
|
"loss": 0.138, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.252546393191015e-06, |
|
"loss": 0.1269, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.113017999162829e-06, |
|
"loss": 0.1467, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.973489605134645e-06, |
|
"loss": 0.1186, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.834240267894517e-06, |
|
"loss": 0.1267, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.694711873866332e-06, |
|
"loss": 0.1003, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5551834798381473e-06, |
|
"loss": 0.1155, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.4159341425980186e-06, |
|
"loss": 0.1194, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.2764057485698344e-06, |
|
"loss": 0.1241, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1368773545416494e-06, |
|
"loss": 0.1243, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.997348960513465e-06, |
|
"loss": 0.1165, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.85782056648528e-06, |
|
"loss": 0.1174, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7182921724570953e-06, |
|
"loss": 0.1177, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5787637784289107e-06, |
|
"loss": 0.1456, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.439514441188782e-06, |
|
"loss": 0.1301, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.2999860471605973e-06, |
|
"loss": 0.1068, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1604576531324123e-06, |
|
"loss": 0.1018, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0209292591042278e-06, |
|
"loss": 0.1045, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.881400865076043e-06, |
|
"loss": 0.1187, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7418724710478584e-06, |
|
"loss": 0.1267, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6023440770196736e-06, |
|
"loss": 0.1319, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4628156829914888e-06, |
|
"loss": 0.1053, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3235663457513605e-06, |
|
"loss": 0.118, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1840379517231757e-06, |
|
"loss": 0.1208, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.044509557694991e-06, |
|
"loss": 0.1093, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 9.049811636668062e-07, |
|
"loss": 0.1144, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.657318264266779e-07, |
|
"loss": 0.1174, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.262034323984931e-07, |
|
"loss": 0.1143, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.866750383703084e-07, |
|
"loss": 0.1121, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.4742570113018e-07, |
|
"loss": 0.1347, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.0789730710199526e-07, |
|
"loss": 0.0946, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.836891307381053e-08, |
|
"loss": 0.1065, |
|
"step": 179000 |
|
} |
|
], |
|
"max_steps": 179175, |
|
"num_train_epochs": 3, |
|
"total_flos": 2.3247389362520064e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
}