|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.0, |
|
"global_step": 269968, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.990739643216974e-05, |
|
"loss": 2.8507, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9814792864339485e-05, |
|
"loss": 2.7365, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.972218929650922e-05, |
|
"loss": 2.7102, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9629585728678954e-05, |
|
"loss": 2.7208, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.953698216084869e-05, |
|
"loss": 2.7067, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9444378593018436e-05, |
|
"loss": 2.6698, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9351775025188174e-05, |
|
"loss": 2.6546, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.925917145735791e-05, |
|
"loss": 2.6446, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.916656788952765e-05, |
|
"loss": 2.6289, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9073964321697394e-05, |
|
"loss": 2.6532, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8981360753867125e-05, |
|
"loss": 2.5513, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.888875718603686e-05, |
|
"loss": 2.5434, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.87961536182066e-05, |
|
"loss": 2.5836, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8703550050376345e-05, |
|
"loss": 2.5433, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.861094648254608e-05, |
|
"loss": 2.5794, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.851834291471582e-05, |
|
"loss": 2.5457, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.842573934688556e-05, |
|
"loss": 2.5119, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.83331357790553e-05, |
|
"loss": 2.525, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.8240532211225034e-05, |
|
"loss": 2.5471, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.814792864339477e-05, |
|
"loss": 2.5862, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.805532507556451e-05, |
|
"loss": 2.5391, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7962721507734254e-05, |
|
"loss": 2.5123, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.787011793990399e-05, |
|
"loss": 2.5144, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.777751437207373e-05, |
|
"loss": 2.5659, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.768491080424347e-05, |
|
"loss": 2.5536, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.7592307236413205e-05, |
|
"loss": 2.4725, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.749970366858294e-05, |
|
"loss": 2.4336, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.740710010075268e-05, |
|
"loss": 2.5392, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.7314496532922425e-05, |
|
"loss": 2.4867, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.722189296509216e-05, |
|
"loss": 2.4775, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.71292893972619e-05, |
|
"loss": 2.5399, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.703668582943164e-05, |
|
"loss": 2.4984, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.6944082261601376e-05, |
|
"loss": 2.5248, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.6851478693771114e-05, |
|
"loss": 2.4343, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.675887512594085e-05, |
|
"loss": 2.4473, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.666627155811059e-05, |
|
"loss": 2.455, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.6573667990280334e-05, |
|
"loss": 2.4335, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.648106442245007e-05, |
|
"loss": 2.4983, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.638846085461981e-05, |
|
"loss": 2.4334, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.629585728678955e-05, |
|
"loss": 2.4077, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.6203253718959285e-05, |
|
"loss": 2.4161, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.611065015112902e-05, |
|
"loss": 2.4169, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.601804658329876e-05, |
|
"loss": 2.4123, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.59254430154685e-05, |
|
"loss": 2.4486, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.583283944763824e-05, |
|
"loss": 2.3579, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.574023587980798e-05, |
|
"loss": 2.355, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.564763231197772e-05, |
|
"loss": 2.3947, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.555502874414746e-05, |
|
"loss": 2.4513, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.5462425176317195e-05, |
|
"loss": 2.4414, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.536982160848693e-05, |
|
"loss": 2.4163, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.527721804065667e-05, |
|
"loss": 2.4108, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.518461447282641e-05, |
|
"loss": 2.3692, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.509201090499615e-05, |
|
"loss": 2.4022, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.499940733716589e-05, |
|
"loss": 2.3209, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.490680376933563e-05, |
|
"loss": 2.4336, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.4814200201505366e-05, |
|
"loss": 2.3763, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.4721596633675104e-05, |
|
"loss": 2.4228, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.462899306584484e-05, |
|
"loss": 2.4199, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.453638949801458e-05, |
|
"loss": 2.3636, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.4443785930184324e-05, |
|
"loss": 2.4332, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.435118236235406e-05, |
|
"loss": 2.4513, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.42585787945238e-05, |
|
"loss": 2.3083, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.416597522669354e-05, |
|
"loss": 2.3773, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.4073371658863275e-05, |
|
"loss": 2.3017, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.398076809103301e-05, |
|
"loss": 2.3439, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.388816452320275e-05, |
|
"loss": 2.3497, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.379556095537249e-05, |
|
"loss": 2.3305, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.370295738754223e-05, |
|
"loss": 2.3532, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.361035381971197e-05, |
|
"loss": 2.2562, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.351775025188171e-05, |
|
"loss": 2.3071, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.3425146684051446e-05, |
|
"loss": 2.3573, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.3332543116221184e-05, |
|
"loss": 2.3095, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.323993954839092e-05, |
|
"loss": 2.3321, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.314733598056066e-05, |
|
"loss": 2.2776, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.30547324127304e-05, |
|
"loss": 2.2757, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.296212884490014e-05, |
|
"loss": 2.3101, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.286952527706988e-05, |
|
"loss": 2.3099, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.277692170923962e-05, |
|
"loss": 2.3121, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.2684318141409355e-05, |
|
"loss": 2.3046, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.259171457357909e-05, |
|
"loss": 2.2414, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.249911100574883e-05, |
|
"loss": 2.2979, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.240650743791857e-05, |
|
"loss": 2.2733, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.2313903870088306e-05, |
|
"loss": 2.3319, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.222130030225805e-05, |
|
"loss": 2.3224, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.212869673442779e-05, |
|
"loss": 2.2873, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.2036093166597526e-05, |
|
"loss": 2.3584, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.1943489598767264e-05, |
|
"loss": 2.265, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.1850886030937e-05, |
|
"loss": 2.2914, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.175828246310674e-05, |
|
"loss": 2.2585, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.166567889527648e-05, |
|
"loss": 2.2645, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.157307532744622e-05, |
|
"loss": 2.3289, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.148047175961596e-05, |
|
"loss": 2.2665, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.13878681917857e-05, |
|
"loss": 2.2714, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.129526462395543e-05, |
|
"loss": 2.2622, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.120266105612517e-05, |
|
"loss": 2.2456, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.111005748829491e-05, |
|
"loss": 2.3016, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.101745392046465e-05, |
|
"loss": 2.2608, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.092485035263439e-05, |
|
"loss": 2.2983, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.083224678480413e-05, |
|
"loss": 2.2725, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.073964321697387e-05, |
|
"loss": 2.2304, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.064703964914361e-05, |
|
"loss": 2.3037, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.055443608131334e-05, |
|
"loss": 2.2449, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.046183251348308e-05, |
|
"loss": 2.256, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.036922894565282e-05, |
|
"loss": 2.3002, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.027662537782256e-05, |
|
"loss": 2.2611, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.0184021809992296e-05, |
|
"loss": 2.2351, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.009141824216204e-05, |
|
"loss": 2.2351, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.999881467433178e-05, |
|
"loss": 2.2658, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.990621110650151e-05, |
|
"loss": 2.2394, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.981360753867125e-05, |
|
"loss": 2.2527, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.972100397084099e-05, |
|
"loss": 2.2554, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.962840040301073e-05, |
|
"loss": 2.2116, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.953579683518047e-05, |
|
"loss": 2.2459, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.9443193267350205e-05, |
|
"loss": 2.2093, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.935058969951995e-05, |
|
"loss": 2.2384, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.925798613168968e-05, |
|
"loss": 2.222, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.916538256385942e-05, |
|
"loss": 2.1714, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.907277899602916e-05, |
|
"loss": 2.2678, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.89801754281989e-05, |
|
"loss": 2.259, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.888757186036864e-05, |
|
"loss": 2.1827, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.8794968292538376e-05, |
|
"loss": 2.2199, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.870236472470812e-05, |
|
"loss": 2.1635, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.860976115687786e-05, |
|
"loss": 2.126, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.851715758904759e-05, |
|
"loss": 2.277, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.842455402121733e-05, |
|
"loss": 2.1814, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.833195045338707e-05, |
|
"loss": 2.2185, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.823934688555681e-05, |
|
"loss": 2.1325, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.814674331772655e-05, |
|
"loss": 2.1752, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.8054139749896285e-05, |
|
"loss": 2.2558, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.796153618206603e-05, |
|
"loss": 2.1914, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.786893261423576e-05, |
|
"loss": 2.1828, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.77763290464055e-05, |
|
"loss": 2.1864, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.7683725478575236e-05, |
|
"loss": 2.2374, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.759112191074498e-05, |
|
"loss": 2.2294, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.749851834291472e-05, |
|
"loss": 2.1764, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7405914775084456e-05, |
|
"loss": 1.8288, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7313311207254194e-05, |
|
"loss": 1.8335, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.722070763942393e-05, |
|
"loss": 1.789, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.712810407159367e-05, |
|
"loss": 1.8011, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.703550050376341e-05, |
|
"loss": 1.8464, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.6942896935933145e-05, |
|
"loss": 1.9109, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.685029336810289e-05, |
|
"loss": 1.8988, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.675768980027263e-05, |
|
"loss": 1.8336, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.6665086232442365e-05, |
|
"loss": 1.8093, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.657248266461211e-05, |
|
"loss": 1.8718, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.647987909678184e-05, |
|
"loss": 1.8582, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.638727552895158e-05, |
|
"loss": 1.8773, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.6294671961121317e-05, |
|
"loss": 1.845, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.620206839329106e-05, |
|
"loss": 1.8208, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.61094648254608e-05, |
|
"loss": 1.866, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.601686125763054e-05, |
|
"loss": 1.8354, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.5924257689800274e-05, |
|
"loss": 1.874, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.583165412197001e-05, |
|
"loss": 1.8157, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.573905055413975e-05, |
|
"loss": 1.8941, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.564644698630949e-05, |
|
"loss": 1.7856, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.5553843418479226e-05, |
|
"loss": 1.8355, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.546123985064897e-05, |
|
"loss": 1.8542, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.536863628281871e-05, |
|
"loss": 1.8734, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.5276032714988446e-05, |
|
"loss": 1.8471, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.5183429147158183e-05, |
|
"loss": 1.8843, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.509082557932792e-05, |
|
"loss": 1.8549, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.499822201149766e-05, |
|
"loss": 1.8794, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.49056184436674e-05, |
|
"loss": 1.8123, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.4813014875837135e-05, |
|
"loss": 1.9222, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.472041130800688e-05, |
|
"loss": 1.8118, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.462780774017662e-05, |
|
"loss": 1.8444, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.4535204172346355e-05, |
|
"loss": 1.8205, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.444260060451609e-05, |
|
"loss": 1.832, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.434999703668583e-05, |
|
"loss": 1.8681, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.425739346885557e-05, |
|
"loss": 1.8315, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.4164789901025306e-05, |
|
"loss": 1.8455, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.407218633319505e-05, |
|
"loss": 1.8116, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.397958276536479e-05, |
|
"loss": 1.8428, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.3886979197534526e-05, |
|
"loss": 1.8807, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.3794375629704264e-05, |
|
"loss": 1.848, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.3701772061874e-05, |
|
"loss": 1.8763, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 3.360916849404374e-05, |
|
"loss": 1.8613, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.351656492621348e-05, |
|
"loss": 1.887, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.3423961358383215e-05, |
|
"loss": 1.8228, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.333135779055296e-05, |
|
"loss": 1.8495, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 3.32387542227227e-05, |
|
"loss": 1.8136, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.3146150654892435e-05, |
|
"loss": 1.8473, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 3.305354708706217e-05, |
|
"loss": 1.8911, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 3.296094351923191e-05, |
|
"loss": 1.8469, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.286833995140165e-05, |
|
"loss": 1.8637, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 3.2775736383571386e-05, |
|
"loss": 1.8833, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.2683132815741124e-05, |
|
"loss": 1.7691, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 3.259052924791087e-05, |
|
"loss": 1.8607, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 3.2497925680080606e-05, |
|
"loss": 1.8724, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.2405322112250344e-05, |
|
"loss": 1.857, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.231271854442008e-05, |
|
"loss": 1.8516, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.222011497658982e-05, |
|
"loss": 1.8776, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.212751140875956e-05, |
|
"loss": 1.856, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.2034907840929295e-05, |
|
"loss": 1.8006, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.194230427309903e-05, |
|
"loss": 1.8157, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.184970070526878e-05, |
|
"loss": 1.888, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.1757097137438515e-05, |
|
"loss": 1.7996, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.166449356960825e-05, |
|
"loss": 1.8884, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.1571890001777984e-05, |
|
"loss": 1.8581, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.147928643394773e-05, |
|
"loss": 1.8047, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.1386682866117467e-05, |
|
"loss": 1.782, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.1294079298287204e-05, |
|
"loss": 1.8976, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 3.120147573045695e-05, |
|
"loss": 1.8875, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.110887216262669e-05, |
|
"loss": 1.8775, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 3.1016268594796424e-05, |
|
"loss": 1.8253, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.0923665026966155e-05, |
|
"loss": 1.8766, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 3.08310614591359e-05, |
|
"loss": 1.7809, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 3.073845789130564e-05, |
|
"loss": 1.8724, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 3.0645854323475376e-05, |
|
"loss": 1.87, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.055325075564511e-05, |
|
"loss": 1.888, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.0460647187814855e-05, |
|
"loss": 1.804, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.0368043619984592e-05, |
|
"loss": 1.8045, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.027544005215433e-05, |
|
"loss": 1.8211, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.0182836484324068e-05, |
|
"loss": 1.8786, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.009023291649381e-05, |
|
"loss": 1.863, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.9997629348663547e-05, |
|
"loss": 1.8576, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.9905025780833285e-05, |
|
"loss": 1.871, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.9812422213003022e-05, |
|
"loss": 1.8222, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.9719818645172764e-05, |
|
"loss": 1.8619, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.96272150773425e-05, |
|
"loss": 1.8899, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.953461150951224e-05, |
|
"loss": 1.8915, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.9442007941681977e-05, |
|
"loss": 1.8638, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.9349404373851718e-05, |
|
"loss": 1.8411, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.9256800806021456e-05, |
|
"loss": 1.8337, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.9164197238191194e-05, |
|
"loss": 1.8295, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.907159367036093e-05, |
|
"loss": 1.8165, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.8978990102530673e-05, |
|
"loss": 1.7852, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.888638653470041e-05, |
|
"loss": 1.806, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.8793782966870148e-05, |
|
"loss": 1.836, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.870117939903989e-05, |
|
"loss": 1.8174, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.8608575831209627e-05, |
|
"loss": 1.8736, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8515972263379365e-05, |
|
"loss": 1.8421, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.8423368695549103e-05, |
|
"loss": 1.8666, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.8330765127718844e-05, |
|
"loss": 1.8455, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.8238161559888582e-05, |
|
"loss": 1.8277, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.814555799205832e-05, |
|
"loss": 1.7909, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.8052954424228057e-05, |
|
"loss": 1.7783, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.79603508563978e-05, |
|
"loss": 1.8208, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.7867747288567536e-05, |
|
"loss": 1.8484, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.7775143720737274e-05, |
|
"loss": 1.824, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.7682540152907012e-05, |
|
"loss": 1.8373, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.7589936585076753e-05, |
|
"loss": 1.8155, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.749733301724649e-05, |
|
"loss": 1.7846, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.740472944941623e-05, |
|
"loss": 1.8153, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.7312125881585966e-05, |
|
"loss": 1.8557, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.7219522313755708e-05, |
|
"loss": 1.8372, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.7126918745925445e-05, |
|
"loss": 1.8148, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.7034315178095183e-05, |
|
"loss": 1.7911, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.694171161026492e-05, |
|
"loss": 1.8467, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.6849108042434662e-05, |
|
"loss": 1.8121, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.67565044746044e-05, |
|
"loss": 1.8034, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.6663900906774138e-05, |
|
"loss": 1.7933, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.6571297338943872e-05, |
|
"loss": 1.8018, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.6478693771113617e-05, |
|
"loss": 1.7972, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.6386090203283354e-05, |
|
"loss": 1.8592, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.6293486635453092e-05, |
|
"loss": 1.8192, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.6200883067622833e-05, |
|
"loss": 1.828, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.610827949979257e-05, |
|
"loss": 1.8501, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.601567593196231e-05, |
|
"loss": 1.8408, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.5923072364132047e-05, |
|
"loss": 1.8387, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.5830468796301788e-05, |
|
"loss": 1.8467, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.5737865228471526e-05, |
|
"loss": 1.8348, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.5645261660641263e-05, |
|
"loss": 1.796, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.5552658092810998e-05, |
|
"loss": 1.804, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.5460054524980742e-05, |
|
"loss": 1.7959, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.536745095715048e-05, |
|
"loss": 1.8213, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.5274847389320218e-05, |
|
"loss": 1.7823, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.5182243821489952e-05, |
|
"loss": 1.7913, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.5089640253659697e-05, |
|
"loss": 1.7693, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.4997036685829435e-05, |
|
"loss": 1.797, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.4904433117999172e-05, |
|
"loss": 1.3749, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.481182955016891e-05, |
|
"loss": 1.3449, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.4719225982338648e-05, |
|
"loss": 1.387, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.462662241450839e-05, |
|
"loss": 1.3564, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.4534018846678124e-05, |
|
"loss": 1.3306, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.4441415278847865e-05, |
|
"loss": 1.3972, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.4348811711017603e-05, |
|
"loss": 1.3669, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.4256208143187344e-05, |
|
"loss": 1.4143, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.4163604575357078e-05, |
|
"loss": 1.3784, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.407100100752682e-05, |
|
"loss": 1.3759, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.3978397439696557e-05, |
|
"loss": 1.3384, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.3885793871866298e-05, |
|
"loss": 1.3865, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.3793190304036033e-05, |
|
"loss": 1.3423, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.3700586736205774e-05, |
|
"loss": 1.3576, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.360798316837551e-05, |
|
"loss": 1.3832, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.351537960054525e-05, |
|
"loss": 1.3412, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.342277603271499e-05, |
|
"loss": 1.4008, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.333017246488473e-05, |
|
"loss": 1.4198, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.323756889705447e-05, |
|
"loss": 1.3535, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.3144965329224204e-05, |
|
"loss": 1.4097, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.3052361761393945e-05, |
|
"loss": 1.409, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.2959758193563683e-05, |
|
"loss": 1.4151, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.2867154625733424e-05, |
|
"loss": 1.4083, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.277455105790316e-05, |
|
"loss": 1.4173, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.26819474900729e-05, |
|
"loss": 1.463, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.2589343922242637e-05, |
|
"loss": 1.4193, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.2496740354412375e-05, |
|
"loss": 1.4219, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.2404136786582113e-05, |
|
"loss": 1.3641, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2311533218751854e-05, |
|
"loss": 1.3905, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2218929650921592e-05, |
|
"loss": 1.4293, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.212632608309133e-05, |
|
"loss": 1.4548, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.2033722515261068e-05, |
|
"loss": 1.4201, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.194111894743081e-05, |
|
"loss": 1.3813, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.1848515379600546e-05, |
|
"loss": 1.3631, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.1755911811770284e-05, |
|
"loss": 1.4613, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1663308243940022e-05, |
|
"loss": 1.37, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.1570704676109763e-05, |
|
"loss": 1.4528, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.14781011082795e-05, |
|
"loss": 1.3796, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.138549754044924e-05, |
|
"loss": 1.4019, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.1292893972618977e-05, |
|
"loss": 1.3995, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.1200290404788718e-05, |
|
"loss": 1.3776, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.1107686836958456e-05, |
|
"loss": 1.4263, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.1015083269128193e-05, |
|
"loss": 1.395, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.092247970129793e-05, |
|
"loss": 1.3474, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.0829876133467672e-05, |
|
"loss": 1.3742, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.073727256563741e-05, |
|
"loss": 1.4322, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.0644668997807148e-05, |
|
"loss": 1.3475, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.055206542997689e-05, |
|
"loss": 1.3618, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.0459461862146627e-05, |
|
"loss": 1.3992, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.0366858294316365e-05, |
|
"loss": 1.4042, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.0274254726486102e-05, |
|
"loss": 1.4052, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0181651158655844e-05, |
|
"loss": 1.4609, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.008904759082558e-05, |
|
"loss": 1.4281, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.999644402299532e-05, |
|
"loss": 1.4165, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9903840455165057e-05, |
|
"loss": 1.383, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9811236887334798e-05, |
|
"loss": 1.3725, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9718633319504536e-05, |
|
"loss": 1.3656, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.9626029751674274e-05, |
|
"loss": 1.4094, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.953342618384401e-05, |
|
"loss": 1.4369, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.9440822616013753e-05, |
|
"loss": 1.4073, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9348219048183487e-05, |
|
"loss": 1.3785, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9255615480353228e-05, |
|
"loss": 1.4189, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.9163011912522966e-05, |
|
"loss": 1.3908, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.9070408344692707e-05, |
|
"loss": 1.414, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.897780477686244e-05, |
|
"loss": 1.3761, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.8885201209032183e-05, |
|
"loss": 1.4145, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.879259764120192e-05, |
|
"loss": 1.425, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.869999407337166e-05, |
|
"loss": 1.4359, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.8607390505541396e-05, |
|
"loss": 1.3791, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.8514786937711137e-05, |
|
"loss": 1.3784, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.8422183369880875e-05, |
|
"loss": 1.4094, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.8329579802050613e-05, |
|
"loss": 1.4048, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.8236976234220354e-05, |
|
"loss": 1.4183, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.8144372666390092e-05, |
|
"loss": 1.3787, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.8051769098559833e-05, |
|
"loss": 1.4246, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.7959165530729567e-05, |
|
"loss": 1.3803, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.786656196289931e-05, |
|
"loss": 1.4329, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.7773958395069046e-05, |
|
"loss": 1.3802, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.7681354827238787e-05, |
|
"loss": 1.3925, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.7588751259408522e-05, |
|
"loss": 1.3817, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.7496147691578263e-05, |
|
"loss": 1.3918, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.7403544123748e-05, |
|
"loss": 1.4201, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.731094055591774e-05, |
|
"loss": 1.4103, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.7218336988087476e-05, |
|
"loss": 1.4312, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.7125733420257218e-05, |
|
"loss": 1.3746, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.7033129852426955e-05, |
|
"loss": 1.4273, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.6940526284596693e-05, |
|
"loss": 1.3886, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.684792271676643e-05, |
|
"loss": 1.3741, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.6755319148936172e-05, |
|
"loss": 1.3609, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.666271558110591e-05, |
|
"loss": 1.3623, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.6570112013275648e-05, |
|
"loss": 1.3889, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.6477508445445385e-05, |
|
"loss": 1.448, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.6384904877615127e-05, |
|
"loss": 1.3549, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.6292301309784864e-05, |
|
"loss": 1.4019, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.6199697741954602e-05, |
|
"loss": 1.3859, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.610709417412434e-05, |
|
"loss": 1.3908, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.601449060629408e-05, |
|
"loss": 1.3679, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.592188703846382e-05, |
|
"loss": 1.359, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.5829283470633557e-05, |
|
"loss": 1.3884, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.5736679902803294e-05, |
|
"loss": 1.3703, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.5644076334973036e-05, |
|
"loss": 1.3434, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.5551472767142773e-05, |
|
"loss": 1.3662, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.545886919931251e-05, |
|
"loss": 1.4051, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.5366265631482252e-05, |
|
"loss": 1.4064, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.527366206365199e-05, |
|
"loss": 1.3615, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.518105849582173e-05, |
|
"loss": 1.3496, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.5088454927991466e-05, |
|
"loss": 1.4558, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.4995851360161205e-05, |
|
"loss": 1.3331, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.4903247792330943e-05, |
|
"loss": 1.3776, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.4810644224500682e-05, |
|
"loss": 1.3912, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.471804065667042e-05, |
|
"loss": 1.3816, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.462543708884016e-05, |
|
"loss": 1.3897, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4532833521009898e-05, |
|
"loss": 1.3831, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4440229953179637e-05, |
|
"loss": 1.3954, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4347626385349375e-05, |
|
"loss": 1.383, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.4255022817519114e-05, |
|
"loss": 1.3408, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.4162419249688852e-05, |
|
"loss": 1.3996, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.4069815681858592e-05, |
|
"loss": 1.3653, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.397721211402833e-05, |
|
"loss": 1.4181, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.3884608546198069e-05, |
|
"loss": 1.3542, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.3792004978367807e-05, |
|
"loss": 1.3785, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.3699401410537546e-05, |
|
"loss": 1.4211, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.3606797842707284e-05, |
|
"loss": 1.3833, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3514194274877023e-05, |
|
"loss": 1.3677, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.3421590707046761e-05, |
|
"loss": 1.4014, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.33289871392165e-05, |
|
"loss": 1.408, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.3236383571386238e-05, |
|
"loss": 1.3761, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.3143780003555978e-05, |
|
"loss": 1.412, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.3051176435725716e-05, |
|
"loss": 1.3706, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2958572867895455e-05, |
|
"loss": 1.3831, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.2865969300065195e-05, |
|
"loss": 1.3469, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.2773365732234932e-05, |
|
"loss": 1.3403, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.2680762164404672e-05, |
|
"loss": 1.3783, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.258815859657441e-05, |
|
"loss": 1.3897, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.2495555028744147e-05, |
|
"loss": 1.3974, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.2402951460913887e-05, |
|
"loss": 0.9791, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.2310347893083625e-05, |
|
"loss": 0.9923, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.2217744325253364e-05, |
|
"loss": 0.9658, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.2125140757423102e-05, |
|
"loss": 1.0327, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.2032537189592841e-05, |
|
"loss": 0.9956, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.193993362176258e-05, |
|
"loss": 0.9618, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.1847330053932319e-05, |
|
"loss": 0.9582, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.1754726486102056e-05, |
|
"loss": 1.002, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.1662122918271796e-05, |
|
"loss": 0.959, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.1569519350441535e-05, |
|
"loss": 0.9495, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.1476915782611273e-05, |
|
"loss": 1.016, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.1384312214781013e-05, |
|
"loss": 1.0179, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.129170864695075e-05, |
|
"loss": 0.9873, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.119910507912049e-05, |
|
"loss": 1.0094, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.1106501511290228e-05, |
|
"loss": 0.9773, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.1013897943459967e-05, |
|
"loss": 1.004, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.0921294375629705e-05, |
|
"loss": 0.9712, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.0828690807799444e-05, |
|
"loss": 0.9503, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.0736087239969182e-05, |
|
"loss": 1.0044, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.064348367213892e-05, |
|
"loss": 0.9697, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.055088010430866e-05, |
|
"loss": 0.9893, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.0458276536478397e-05, |
|
"loss": 1.028, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.0365672968648137e-05, |
|
"loss": 0.972, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.0273069400817875e-05, |
|
"loss": 0.975, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.0180465832987614e-05, |
|
"loss": 0.9553, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.0087862265157352e-05, |
|
"loss": 0.9977, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 9.995258697327091e-06, |
|
"loss": 1.0038, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 9.902655129496829e-06, |
|
"loss": 0.9956, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 9.810051561666569e-06, |
|
"loss": 0.975, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 9.717447993836306e-06, |
|
"loss": 0.9638, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 9.624844426006046e-06, |
|
"loss": 0.9551, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 9.532240858175784e-06, |
|
"loss": 0.9791, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 9.439637290345523e-06, |
|
"loss": 1.015, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 9.347033722515261e-06, |
|
"loss": 0.9465, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 9.254430154685e-06, |
|
"loss": 0.9856, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 9.161826586854738e-06, |
|
"loss": 0.9704, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 9.069223019024476e-06, |
|
"loss": 0.9467, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 8.976619451194217e-06, |
|
"loss": 0.9747, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 8.884015883363955e-06, |
|
"loss": 0.9979, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 8.791412315533694e-06, |
|
"loss": 0.9839, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 8.698808747703432e-06, |
|
"loss": 0.9905, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 8.606205179873172e-06, |
|
"loss": 1.0233, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 8.51360161204291e-06, |
|
"loss": 1.0214, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 8.420998044212649e-06, |
|
"loss": 0.9928, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 8.328394476382387e-06, |
|
"loss": 0.98, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 8.235790908552126e-06, |
|
"loss": 0.9924, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 8.143187340721864e-06, |
|
"loss": 1.0215, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 8.050583772891602e-06, |
|
"loss": 0.957, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 7.957980205061341e-06, |
|
"loss": 0.988, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 7.865376637231079e-06, |
|
"loss": 0.9771, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 7.772773069400818e-06, |
|
"loss": 0.9663, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 7.680169501570556e-06, |
|
"loss": 0.9971, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 7.587565933740296e-06, |
|
"loss": 0.9708, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 7.494962365910034e-06, |
|
"loss": 0.9741, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 7.402358798079773e-06, |
|
"loss": 0.9427, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 7.309755230249511e-06, |
|
"loss": 1.0002, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 7.2171516624192494e-06, |
|
"loss": 0.9685, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 7.124548094588988e-06, |
|
"loss": 0.9676, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 7.031944526758727e-06, |
|
"loss": 0.9527, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 6.939340958928465e-06, |
|
"loss": 0.9675, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 6.846737391098204e-06, |
|
"loss": 0.9852, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 6.754133823267943e-06, |
|
"loss": 0.9607, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 6.661530255437681e-06, |
|
"loss": 0.9526, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 6.56892668760742e-06, |
|
"loss": 0.9592, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 6.4763231197771585e-06, |
|
"loss": 0.9227, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 6.383719551946897e-06, |
|
"loss": 0.9842, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 6.291115984116637e-06, |
|
"loss": 0.9649, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 6.198512416286375e-06, |
|
"loss": 0.9231, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 6.105908848456114e-06, |
|
"loss": 0.9331, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 6.013305280625852e-06, |
|
"loss": 0.9748, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 5.92070171279559e-06, |
|
"loss": 0.9471, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 5.828098144965329e-06, |
|
"loss": 0.9819, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 5.7354945771350676e-06, |
|
"loss": 0.9898, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 5.642891009304807e-06, |
|
"loss": 0.9692, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 5.550287441474546e-06, |
|
"loss": 0.9939, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 5.457683873644284e-06, |
|
"loss": 0.9903, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 5.365080305814023e-06, |
|
"loss": 0.9671, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 5.2724767379837616e-06, |
|
"loss": 0.9527, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.1798731701535e-06, |
|
"loss": 0.9644, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 5.087269602323239e-06, |
|
"loss": 0.9809, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.9946660344929775e-06, |
|
"loss": 0.9401, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.902062466662716e-06, |
|
"loss": 0.9661, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.809458898832455e-06, |
|
"loss": 0.9696, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.7168553310021925e-06, |
|
"loss": 0.959, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.624251763171931e-06, |
|
"loss": 0.9661, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.53164819534167e-06, |
|
"loss": 0.9654, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.439044627511408e-06, |
|
"loss": 0.9804, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.346441059681148e-06, |
|
"loss": 0.9792, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.2538374918508865e-06, |
|
"loss": 0.8952, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.161233924020625e-06, |
|
"loss": 0.9645, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.068630356190364e-06, |
|
"loss": 0.9889, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.976026788360102e-06, |
|
"loss": 0.9172, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.883423220529841e-06, |
|
"loss": 0.9503, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.7908196526995792e-06, |
|
"loss": 0.9454, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.698216084869318e-06, |
|
"loss": 0.9557, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.6056125170390565e-06, |
|
"loss": 0.9833, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.513008949208795e-06, |
|
"loss": 0.9378, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.4204053813785338e-06, |
|
"loss": 0.9718, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.3278018135482724e-06, |
|
"loss": 0.9639, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.235198245718011e-06, |
|
"loss": 0.9254, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.1425946778877497e-06, |
|
"loss": 1.0007, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.0499911100574883e-06, |
|
"loss": 0.968, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 2.957387542227227e-06, |
|
"loss": 0.9663, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 2.8647839743969656e-06, |
|
"loss": 0.9838, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 2.772180406566704e-06, |
|
"loss": 0.9547, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.6795768387364432e-06, |
|
"loss": 0.9677, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 2.586973270906182e-06, |
|
"loss": 0.9753, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 2.49436970307592e-06, |
|
"loss": 0.9834, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 2.4017661352456587e-06, |
|
"loss": 0.9809, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.3091625674153974e-06, |
|
"loss": 0.9584, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 2.216558999585136e-06, |
|
"loss": 0.9321, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 2.1239554317548746e-06, |
|
"loss": 0.944, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 2.0313518639246137e-06, |
|
"loss": 0.9592, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.9387482960943523e-06, |
|
"loss": 0.96, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 1.8461447282640907e-06, |
|
"loss": 0.9804, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.7535411604338294e-06, |
|
"loss": 0.9617, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.6609375926035678e-06, |
|
"loss": 0.9234, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.5683340247733064e-06, |
|
"loss": 0.965, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.4757304569430452e-06, |
|
"loss": 0.9286, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.3831268891127839e-06, |
|
"loss": 0.9899, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.2905233212825223e-06, |
|
"loss": 0.9527, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 1.1979197534522611e-06, |
|
"loss": 0.9387, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.1053161856219998e-06, |
|
"loss": 0.9385, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.0127126177917382e-06, |
|
"loss": 0.976, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 9.20109049961477e-07, |
|
"loss": 0.9702, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 8.275054821312157e-07, |
|
"loss": 0.9343, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 7.349019143009542e-07, |
|
"loss": 0.9258, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 6.422983464706928e-07, |
|
"loss": 0.9513, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 5.496947786404315e-07, |
|
"loss": 0.9689, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.5709121081017015e-07, |
|
"loss": 0.9296, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.644876429799088e-07, |
|
"loss": 0.9628, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.7188407514964736e-07, |
|
"loss": 0.9392, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.7928050731938602e-07, |
|
"loss": 0.9467, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 8.667693948912465e-08, |
|
"loss": 0.9457, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"step": 269968, |
|
"total_flos": 2.914340112407593e+17, |
|
"train_loss": 1.6431024962071243, |
|
"train_runtime": 97099.2276, |
|
"train_samples_per_second": 2.78, |
|
"train_steps_per_second": 2.78 |
|
} |
|
], |
|
"max_steps": 269968, |
|
"num_train_epochs": 4, |
|
"total_flos": 2.914340112407593e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |