{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 137964,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9818793308399294e-05,
      "loss": 3.0214,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9637586616798586e-05,
      "loss": 2.1747,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9456379925197885e-05,
      "loss": 1.8966,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.927517323359717e-05,
      "loss": 1.7367,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.909396654199647e-05,
      "loss": 1.6498,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8912759850395754e-05,
      "loss": 1.5595,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.873155315879505e-05,
      "loss": 1.4976,
      "step": 3500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.855034646719434e-05,
      "loss": 1.4493,
      "step": 4000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.836913977559364e-05,
      "loss": 1.4386,
      "step": 4500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.818793308399292e-05,
      "loss": 1.4008,
      "step": 5000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.800672639239222e-05,
      "loss": 1.3648,
      "step": 5500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.782551970079151e-05,
      "loss": 1.332,
      "step": 6000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7644313009190805e-05,
      "loss": 1.31,
      "step": 6500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.74631063175901e-05,
      "loss": 1.3129,
      "step": 7000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.728189962598939e-05,
      "loss": 1.2762,
      "step": 7500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.710069293438869e-05,
      "loss": 1.2457,
      "step": 8000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.691948624278797e-05,
      "loss": 1.2464,
      "step": 8500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.673827955118727e-05,
      "loss": 1.2334,
      "step": 9000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.655707285958656e-05,
      "loss": 1.2099,
      "step": 9500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6375866167985856e-05,
      "loss": 1.1947,
      "step": 10000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.619465947638514e-05,
      "loss": 1.1837,
      "step": 10500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.601345278478444e-05,
      "loss": 1.1655,
      "step": 11000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.583224609318373e-05,
      "loss": 1.1704,
      "step": 11500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5651039401583024e-05,
      "loss": 1.1593,
      "step": 12000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5469832709982316e-05,
      "loss": 1.1309,
      "step": 12500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.528862601838161e-05,
      "loss": 1.1268,
      "step": 13000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5107419326780906e-05,
      "loss": 1.1334,
      "step": 13500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.492621263518019e-05,
      "loss": 1.1136,
      "step": 14000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.474500594357949e-05,
      "loss": 1.1129,
      "step": 14500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4563799251978775e-05,
      "loss": 1.0964,
      "step": 15000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4382592560378074e-05,
      "loss": 1.0964,
      "step": 15500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.420138586877736e-05,
      "loss": 1.0841,
      "step": 16000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.402017917717666e-05,
      "loss": 1.0873,
      "step": 16500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.383897248557595e-05,
      "loss": 1.0711,
      "step": 17000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.365776579397524e-05,
      "loss": 1.061,
      "step": 17500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3476559102374534e-05,
      "loss": 1.0518,
      "step": 18000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3295352410773826e-05,
      "loss": 1.0577,
      "step": 18500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.311414571917312e-05,
      "loss": 1.0619,
      "step": 19000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.293293902757241e-05,
      "loss": 1.061,
      "step": 19500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.275173233597171e-05,
      "loss": 1.0464,
      "step": 20000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2570525644370994e-05,
      "loss": 1.038,
      "step": 20500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.238931895277029e-05,
      "loss": 1.027,
      "step": 21000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.220811226116958e-05,
      "loss": 1.0309,
      "step": 21500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.202690556956888e-05,
      "loss": 1.0258,
      "step": 22000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.184569887796817e-05,
      "loss": 1.01,
      "step": 22500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.166449218636746e-05,
      "loss": 1.0198,
      "step": 23000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.148328549476675e-05,
      "loss": 1.0066,
      "step": 23500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1302078803166045e-05,
      "loss": 0.9983,
      "step": 24000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.112087211156534e-05,
      "loss": 0.9934,
      "step": 24500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.093966541996463e-05,
      "loss": 0.9902,
      "step": 25000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.075845872836393e-05,
      "loss": 1.0013,
      "step": 25500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.057725203676321e-05,
      "loss": 0.9806,
      "step": 26000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.039604534516251e-05,
      "loss": 1.0136,
      "step": 26500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0214838653561797e-05,
      "loss": 0.9899,
      "step": 27000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.0033631961961095e-05,
      "loss": 0.9824,
      "step": 27500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.985242527036039e-05,
      "loss": 0.9735,
      "step": 28000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.967121857875968e-05,
      "loss": 0.9811,
      "step": 28500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.949001188715897e-05,
      "loss": 0.9708,
      "step": 29000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.930880519555826e-05,
      "loss": 0.9819,
      "step": 29500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.9127598503957555e-05,
      "loss": 0.9623,
      "step": 30000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.894639181235685e-05,
      "loss": 0.9595,
      "step": 30500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.876518512075614e-05,
      "loss": 0.965,
      "step": 31000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.858397842915543e-05,
      "loss": 0.9619,
      "step": 31500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.840277173755473e-05,
      "loss": 0.9644,
      "step": 32000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8221565045954015e-05,
      "loss": 0.9442,
      "step": 32500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8040358354353314e-05,
      "loss": 0.9481,
      "step": 33000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7859151662752606e-05,
      "loss": 0.9554,
      "step": 33500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.76779449711519e-05,
      "loss": 0.9415,
      "step": 34000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.749673827955119e-05,
      "loss": 0.9278,
      "step": 34500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.731553158795048e-05,
      "loss": 0.933,
      "step": 35000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7134324896349774e-05,
      "loss": 0.9278,
      "step": 35500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.6953118204749066e-05,
      "loss": 0.9316,
      "step": 36000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.677191151314836e-05,
      "loss": 0.9208,
      "step": 36500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.659070482154765e-05,
      "loss": 0.9229,
      "step": 37000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.640949812994694e-05,
      "loss": 0.9311,
      "step": 37500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6228291438346234e-05,
      "loss": 0.937,
      "step": 38000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.604708474674553e-05,
      "loss": 0.9227,
      "step": 38500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5865878055144824e-05,
      "loss": 0.9231,
      "step": 39000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5684671363544116e-05,
      "loss": 0.9296,
      "step": 39500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.550346467194341e-05,
      "loss": 0.9091,
      "step": 40000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.53222579803427e-05,
      "loss": 0.901,
      "step": 40500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.514105128874199e-05,
      "loss": 0.8959,
      "step": 41000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.4959844597141284e-05,
      "loss": 0.9059,
      "step": 41500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.4778637905540576e-05,
      "loss": 0.8996,
      "step": 42000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.459743121393987e-05,
      "loss": 0.8873,
      "step": 42500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.441622452233916e-05,
      "loss": 0.8974,
      "step": 43000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.423501783073845e-05,
      "loss": 0.9052,
      "step": 43500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.405381113913775e-05,
      "loss": 0.8852,
      "step": 44000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.387260444753704e-05,
      "loss": 0.9024,
      "step": 44500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3691397755936335e-05,
      "loss": 0.8941,
      "step": 45000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.351019106433563e-05,
      "loss": 0.9001,
      "step": 45500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.332898437273492e-05,
      "loss": 0.8952,
      "step": 46000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.314777768113421e-05,
      "loss": 0.8079,
      "step": 46500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.29665709895335e-05,
      "loss": 0.8126,
      "step": 47000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2785364297932795e-05,
      "loss": 0.8161,
      "step": 47500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.260415760633209e-05,
      "loss": 0.816,
      "step": 48000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.242295091473138e-05,
      "loss": 0.8252,
      "step": 48500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.224174422313067e-05,
      "loss": 0.8052,
      "step": 49000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.206053753152996e-05,
      "loss": 0.8165,
      "step": 49500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.187933083992926e-05,
      "loss": 0.8116,
      "step": 50000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1698124148328554e-05,
      "loss": 0.8234,
      "step": 50500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.1516917456727846e-05,
      "loss": 0.8174,
      "step": 51000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.133571076512714e-05,
      "loss": 0.7963,
      "step": 51500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.115450407352643e-05,
      "loss": 0.807,
      "step": 52000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.097329738192572e-05,
      "loss": 0.8219,
      "step": 52500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0792090690325014e-05,
      "loss": 0.8023,
      "step": 53000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0610883998724306e-05,
      "loss": 0.784,
      "step": 53500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.04296773071236e-05,
      "loss": 0.8099,
      "step": 54000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.024847061552289e-05,
      "loss": 0.8159,
      "step": 54500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0067263923922185e-05,
      "loss": 0.7992,
      "step": 55000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.988605723232148e-05,
      "loss": 0.8047,
      "step": 55500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.970485054072077e-05,
      "loss": 0.8101,
      "step": 56000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9523643849120064e-05,
      "loss": 0.8161,
      "step": 56500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9342437157519353e-05,
      "loss": 0.7975,
      "step": 57000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9161230465918648e-05,
      "loss": 0.8019,
      "step": 57500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8980023774317937e-05,
      "loss": 0.7975,
      "step": 58000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8798817082717232e-05,
      "loss": 0.7981,
      "step": 58500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8617610391116524e-05,
      "loss": 0.7917,
      "step": 59000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.843640369951582e-05,
      "loss": 0.801,
      "step": 59500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8255197007915108e-05,
      "loss": 0.8019,
      "step": 60000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8073990316314403e-05,
      "loss": 0.7991,
      "step": 60500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7892783624713692e-05,
      "loss": 0.7888,
      "step": 61000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7711576933112987e-05,
      "loss": 0.7898,
      "step": 61500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7530370241512283e-05,
      "loss": 0.7881,
      "step": 62000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.734916354991157e-05,
      "loss": 0.7897,
      "step": 62500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7167956858310867e-05,
      "loss": 0.7816,
      "step": 63000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.6986750166710155e-05,
      "loss": 0.7877,
      "step": 63500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.680554347510945e-05,
      "loss": 0.7926,
      "step": 64000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.662433678350874e-05,
      "loss": 0.7904,
      "step": 64500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6443130091908035e-05,
      "loss": 0.788,
      "step": 65000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6261923400307327e-05,
      "loss": 0.7884,
      "step": 65500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.6080716708706622e-05,
      "loss": 0.7791,
      "step": 66000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.589951001710591e-05,
      "loss": 0.7844,
      "step": 66500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5718303325505206e-05,
      "loss": 0.7844,
      "step": 67000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.55370966339045e-05,
      "loss": 0.7849,
      "step": 67500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.535588994230379e-05,
      "loss": 0.7804,
      "step": 68000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5174683250703085e-05,
      "loss": 0.7773,
      "step": 68500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.4993476559102374e-05,
      "loss": 0.7851,
      "step": 69000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.481226986750167e-05,
      "loss": 0.7779,
      "step": 69500
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.463106317590096e-05,
      "loss": 0.7679,
      "step": 70000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4449856484300253e-05,
      "loss": 0.7709,
      "step": 70500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4268649792699545e-05,
      "loss": 0.7782,
      "step": 71000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4087443101098837e-05,
      "loss": 0.7587,
      "step": 71500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3906236409498133e-05,
      "loss": 0.7693,
      "step": 72000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3725029717897425e-05,
      "loss": 0.7735,
      "step": 72500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3543823026296717e-05,
      "loss": 0.7686,
      "step": 73000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.336261633469601e-05,
      "loss": 0.7604,
      "step": 73500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.31814096430953e-05,
      "loss": 0.7562,
      "step": 74000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3000202951494593e-05,
      "loss": 0.763,
      "step": 74500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2818996259893888e-05,
      "loss": 0.7717,
      "step": 75000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.263778956829318e-05,
      "loss": 0.7734,
      "step": 75500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2456582876692472e-05,
      "loss": 0.7701,
      "step": 76000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2275376185091764e-05,
      "loss": 0.771,
      "step": 76500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2094169493491056e-05,
      "loss": 0.7608,
      "step": 77000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1912962801890348e-05,
      "loss": 0.7674,
      "step": 77500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1731756110289643e-05,
      "loss": 0.7656,
      "step": 78000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1550549418688935e-05,
      "loss": 0.7623,
      "step": 78500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1369342727088227e-05,
      "loss": 0.7578,
      "step": 79000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.118813603548752e-05,
      "loss": 0.7728,
      "step": 79500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.100692934388681e-05,
      "loss": 0.7581,
      "step": 80000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0825722652286106e-05,
      "loss": 0.7535,
      "step": 80500
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.06445159606854e-05,
      "loss": 0.7584,
      "step": 81000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.046330926908469e-05,
      "loss": 0.7595,
      "step": 81500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0282102577483982e-05,
      "loss": 0.7605,
      "step": 82000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0100895885883274e-05,
      "loss": 0.7453,
      "step": 82500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9919689194282566e-05,
      "loss": 0.7524,
      "step": 83000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.973848250268186e-05,
      "loss": 0.7434,
      "step": 83500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9557275811081154e-05,
      "loss": 0.7459,
      "step": 84000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9376069119480446e-05,
      "loss": 0.752,
      "step": 84500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9194862427879738e-05,
      "loss": 0.7463,
      "step": 85000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.901365573627903e-05,
      "loss": 0.7503,
      "step": 85500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8832449044678325e-05,
      "loss": 0.7455,
      "step": 86000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8651242353077617e-05,
      "loss": 0.7545,
      "step": 86500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.847003566147691e-05,
      "loss": 0.7394,
      "step": 87000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.82888289698762e-05,
      "loss": 0.7513,
      "step": 87500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8107622278275493e-05,
      "loss": 0.7461,
      "step": 88000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.7926415586674785e-05,
      "loss": 0.7406,
      "step": 88500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7745208895074077e-05,
      "loss": 0.7407,
      "step": 89000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.756400220347337e-05,
      "loss": 0.7425,
      "step": 89500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7382795511872664e-05,
      "loss": 0.7434,
      "step": 90000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7201588820271956e-05,
      "loss": 0.7428,
      "step": 90500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7020382128671248e-05,
      "loss": 0.7356,
      "step": 91000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.683917543707054e-05,
      "loss": 0.7433,
      "step": 91500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6657968745469836e-05,
      "loss": 0.7247,
      "step": 92000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6476762053869128e-05,
      "loss": 0.6601,
      "step": 92500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.629555536226842e-05,
      "loss": 0.6681,
      "step": 93000
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.611434867066771e-05,
      "loss": 0.6627,
      "step": 93500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5933141979067004e-05,
      "loss": 0.6547,
      "step": 94000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5751935287466296e-05,
      "loss": 0.6589,
      "step": 94500
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5570728595865588e-05,
      "loss": 0.6623,
      "step": 95000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.538952190426488e-05,
      "loss": 0.6467,
      "step": 95500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5208315212664173e-05,
      "loss": 0.6689,
      "step": 96000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5027108521063465e-05,
      "loss": 0.6571,
      "step": 96500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4845901829462757e-05,
      "loss": 0.6631,
      "step": 97000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4664695137862053e-05,
      "loss": 0.673,
      "step": 97500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4483488446261345e-05,
      "loss": 0.6565,
      "step": 98000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4302281754660638e-05,
      "loss": 0.6649,
      "step": 98500
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.412107506305993e-05,
      "loss": 0.658,
      "step": 99000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3939868371459222e-05,
      "loss": 0.6716,
      "step": 99500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3758661679858514e-05,
      "loss": 0.6547,
      "step": 100000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3577454988257806e-05,
      "loss": 0.6499,
      "step": 100500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.33962482966571e-05,
      "loss": 0.655,
      "step": 101000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3215041605056392e-05,
      "loss": 0.6636,
      "step": 101500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3033834913455684e-05,
      "loss": 0.6471,
      "step": 102000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2852628221854976e-05,
      "loss": 0.6588,
      "step": 102500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2671421530254271e-05,
      "loss": 0.6687,
      "step": 103000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2490214838653561e-05,
      "loss": 0.6511,
      "step": 103500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2309008147052855e-05,
      "loss": 0.657,
      "step": 104000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2127801455452149e-05,
      "loss": 0.6578,
      "step": 104500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.194659476385144e-05,
      "loss": 0.648,
      "step": 105000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1765388072250733e-05,
      "loss": 0.6599,
      "step": 105500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1584181380650025e-05,
      "loss": 0.6415,
      "step": 106000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1402974689049317e-05,
      "loss": 0.6565,
      "step": 106500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.122176799744861e-05,
      "loss": 0.6327,
      "step": 107000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1040561305847904e-05,
      "loss": 0.6526,
      "step": 107500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0859354614247196e-05,
      "loss": 0.6588,
      "step": 108000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0678147922646488e-05,
      "loss": 0.6592,
      "step": 108500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.049694123104578e-05,
      "loss": 0.6426,
      "step": 109000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0315734539445072e-05,
      "loss": 0.6492,
      "step": 109500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0134527847844366e-05,
      "loss": 0.6459,
      "step": 110000
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.953321156243658e-06,
      "loss": 0.6454,
      "step": 110500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.772114464642951e-06,
      "loss": 0.648,
      "step": 111000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.590907773042243e-06,
      "loss": 0.6466,
      "step": 111500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.409701081441535e-06,
      "loss": 0.6441,
      "step": 112000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.228494389840829e-06,
      "loss": 0.6498,
      "step": 112500
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.047287698240121e-06,
      "loss": 0.6443,
      "step": 113000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.866081006639413e-06,
      "loss": 0.648,
      "step": 113500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.684874315038707e-06,
      "loss": 0.6532,
      "step": 114000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.503667623437999e-06,
      "loss": 0.6421,
      "step": 114500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.32246093183729e-06,
      "loss": 0.6498,
      "step": 115000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.141254240236584e-06,
      "loss": 0.6442,
      "step": 115500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.960047548635876e-06,
      "loss": 0.6465,
      "step": 116000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.778840857035168e-06,
      "loss": 0.6421,
      "step": 116500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.597634165434461e-06,
      "loss": 0.6498,
      "step": 117000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.416427473833754e-06,
      "loss": 0.6403,
      "step": 117500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.2352207822330476e-06,
      "loss": 0.6344,
      "step": 118000
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.0540140906323395e-06,
      "loss": 0.6552,
      "step": 118500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.8728073990316315e-06,
      "loss": 0.6366,
      "step": 119000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.691600707430924e-06,
      "loss": 0.6374,
      "step": 119500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.510394015830216e-06,
      "loss": 0.6409,
      "step": 120000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.329187324229509e-06,
      "loss": 0.6277,
      "step": 120500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.147980632628802e-06,
      "loss": 0.6409,
      "step": 121000
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.966773941028095e-06,
      "loss": 0.6385,
      "step": 121500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.785567249427387e-06,
      "loss": 0.6455,
      "step": 122000
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.60436055782668e-06,
      "loss": 0.6401,
      "step": 122500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.4231538662259725e-06,
      "loss": 0.6365,
      "step": 123000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.2419471746252644e-06,
      "loss": 0.6337,
      "step": 123500
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.060740483024557e-06,
      "loss": 0.6316,
      "step": 124000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.87953379142385e-06,
      "loss": 0.6378,
      "step": 124500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.698327099823142e-06,
      "loss": 0.6311,
      "step": 125000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.517120408222436e-06,
      "loss": 0.6373,
      "step": 125500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.335913716621728e-06,
      "loss": 0.6405,
      "step": 126000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.15470702502102e-06,
      "loss": 0.6318,
      "step": 126500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.973500333420313e-06,
      "loss": 0.63,
      "step": 127000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.7922936418196054e-06,
      "loss": 0.6439,
      "step": 127500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.611086950218898e-06,
      "loss": 0.6343,
      "step": 128000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4298802586181906e-06,
      "loss": 0.6426,
      "step": 128500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.248673567017483e-06,
      "loss": 0.6273,
      "step": 129000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0674668754167754e-06,
      "loss": 0.6253,
      "step": 129500
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.886260183816068e-06,
      "loss": 0.6427,
      "step": 130000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.7050534922153607e-06,
      "loss": 0.6235,
      "step": 130500
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.523846800614653e-06,
      "loss": 0.6254,
      "step": 131000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.342640109013946e-06,
      "loss": 0.6308,
      "step": 131500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.1614334174132383e-06,
      "loss": 0.629,
      "step": 132000
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.9802267258125307e-06,
      "loss": 0.6336,
      "step": 132500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.7990200342118236e-06,
      "loss": 0.6313,
      "step": 133000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6178133426111162e-06,
      "loss": 0.6339,
      "step": 133500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4366066510104086e-06,
      "loss": 0.6363,
      "step": 134000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.2553999594097012e-06,
      "loss": 0.6319,
      "step": 134500
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0741932678089936e-06,
      "loss": 0.6242,
      "step": 135000
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.929865762082863e-07,
      "loss": 0.6283,
      "step": 135500
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.117798846075788e-07,
      "loss": 0.6442,
      "step": 136000
    },
    {
      "epoch": 2.97,
      "learning_rate": 5.305731930068714e-07,
      "loss": 0.6303,
      "step": 136500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.4936650140616397e-07,
      "loss": 0.6224,
      "step": 137000
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.681598098054565e-07,
      "loss": 0.6341,
      "step": 137500
    },
    {
      "epoch": 3.0,
      "step": 137964,
      "total_flos": 1.394478551542825e+17,
      "train_loss": 0.8443687578099376,
      "train_runtime": 46828.2642,
      "train_samples_per_second": 29.461,
      "train_steps_per_second": 2.946
    }
  ],
  "max_steps": 137964,
  "num_train_epochs": 3,
  "total_flos": 1.394478551542825e+17,
  "trial_name": null,
  "trial_params": null
}