{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 27.0,
  "global_step": 95877,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-09,
      "loss": 10.5552,
      "step": 1
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.5e-06,
      "loss": 9.5602,
      "step": 500
    },
    {
      "epoch": 0.28,
      "learning_rate": 5e-06,
      "loss": 7.9946,
      "step": 1000
    },
    {
      "epoch": 0.42,
      "learning_rate": 7.5e-06,
      "loss": 6.9937,
      "step": 1500
    },
    {
      "epoch": 0.56,
      "learning_rate": 1e-05,
      "loss": 6.7102,
      "step": 2000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.25e-05,
      "loss": 6.549,
      "step": 2500
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.5e-05,
      "loss": 6.4354,
      "step": 3000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.75e-05,
      "loss": 6.3387,
      "step": 3500
    },
    {
      "epoch": 1.13,
      "learning_rate": 2e-05,
      "loss": 6.2644,
      "step": 4000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.25e-05,
      "loss": 6.1987,
      "step": 4500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.5e-05,
      "loss": 6.1412,
      "step": 5000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 6.0888,
      "step": 5500
    },
    {
      "epoch": 1.69,
      "learning_rate": 3e-05,
      "loss": 6.0466,
      "step": 6000
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 6.0064,
      "step": 6500
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.5e-05,
      "loss": 5.9739,
      "step": 7000
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 5.9407,
      "step": 7500
    },
    {
      "epoch": 2.25,
      "learning_rate": 4e-05,
      "loss": 5.9138,
      "step": 8000
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.2495e-05,
      "loss": 5.8914,
      "step": 8500
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.4995000000000005e-05,
      "loss": 5.8642,
      "step": 9000
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.7495e-05,
      "loss": 5.8451,
      "step": 9500
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.9995000000000005e-05,
      "loss": 5.8283,
      "step": 10000
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.998234543391946e-05,
      "loss": 5.8097,
      "step": 10500
    },
    {
      "epoch": 3.1,
      "learning_rate": 4.996465541690301e-05,
      "loss": 5.793,
      "step": 11000
    },
    {
      "epoch": 3.24,
      "learning_rate": 4.9946929948950656e-05,
      "loss": 5.7748,
      "step": 11500
    },
    {
      "epoch": 3.38,
      "learning_rate": 4.99292044809983e-05,
      "loss": 5.7613,
      "step": 12000
    },
    {
      "epoch": 3.52,
      "learning_rate": 4.991147901304595e-05,
      "loss": 5.7526,
      "step": 12500
    },
    {
      "epoch": 3.66,
      "learning_rate": 4.989375354509359e-05,
      "loss": 5.7426,
      "step": 13000
    },
    {
      "epoch": 3.8,
      "learning_rate": 4.987602807714124e-05,
      "loss": 5.7323,
      "step": 13500
    },
    {
      "epoch": 3.94,
      "learning_rate": 4.985830260918888e-05,
      "loss": 5.7168,
      "step": 14000
    },
    {
      "epoch": 4.08,
      "learning_rate": 4.984057714123653e-05,
      "loss": 5.7093,
      "step": 14500
    },
    {
      "epoch": 4.22,
      "learning_rate": 4.9822851673284175e-05,
      "loss": 5.7007,
      "step": 15000
    },
    {
      "epoch": 4.36,
      "learning_rate": 4.9805161656267726e-05,
      "loss": 5.6922,
      "step": 15500
    },
    {
      "epoch": 4.51,
      "learning_rate": 4.9787436188315376e-05,
      "loss": 5.6871,
      "step": 16000
    },
    {
      "epoch": 4.65,
      "learning_rate": 4.9769710720363025e-05,
      "loss": 5.6804,
      "step": 16500
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.975198525241067e-05,
      "loss": 5.6747,
      "step": 17000
    },
    {
      "epoch": 4.93,
      "learning_rate": 4.973429523539422e-05,
      "loss": 5.6666,
      "step": 17500
    },
    {
      "epoch": 5.07,
      "learning_rate": 4.971656976744186e-05,
      "loss": 5.662,
      "step": 18000
    },
    {
      "epoch": 5.21,
      "learning_rate": 4.969884429948951e-05,
      "loss": 5.6527,
      "step": 18500
    },
    {
      "epoch": 5.35,
      "learning_rate": 4.9681118831537154e-05,
      "loss": 5.6505,
      "step": 19000
    },
    {
      "epoch": 5.49,
      "learning_rate": 4.9663428814520705e-05,
      "loss": 5.6443,
      "step": 19500
    },
    {
      "epoch": 5.63,
      "learning_rate": 4.964570334656835e-05,
      "loss": 5.642,
      "step": 20000
    },
    {
      "epoch": 5.77,
      "learning_rate": 4.9628013329551906e-05,
      "loss": 5.6377,
      "step": 20500
    },
    {
      "epoch": 5.91,
      "learning_rate": 4.961028786159955e-05,
      "loss": 5.6315,
      "step": 21000
    },
    {
      "epoch": 6.05,
      "learning_rate": 4.95925623936472e-05,
      "loss": 5.6261,
      "step": 21500
    },
    {
      "epoch": 6.2,
      "learning_rate": 4.957483692569484e-05,
      "loss": 5.6202,
      "step": 22000
    },
    {
      "epoch": 6.34,
      "learning_rate": 4.955711145774249e-05,
      "loss": 5.615,
      "step": 22500
    },
    {
      "epoch": 6.48,
      "learning_rate": 4.953938598979013e-05,
      "loss": 5.6091,
      "step": 23000
    },
    {
      "epoch": 6.62,
      "learning_rate": 4.952166052183778e-05,
      "loss": 5.6078,
      "step": 23500
    },
    {
      "epoch": 6.76,
      "learning_rate": 4.950397050482133e-05,
      "loss": 5.6059,
      "step": 24000
    },
    {
      "epoch": 6.9,
      "learning_rate": 4.9486245036868976e-05,
      "loss": 5.5979,
      "step": 24500
    },
    {
      "epoch": 7.04,
      "learning_rate": 4.946851956891662e-05,
      "loss": 5.5988,
      "step": 25000
    },
    {
      "epoch": 7.18,
      "learning_rate": 4.945079410096427e-05,
      "loss": 5.595,
      "step": 25500
    },
    {
      "epoch": 7.32,
      "learning_rate": 4.943306863301191e-05,
      "loss": 5.5909,
      "step": 26000
    },
    {
      "epoch": 7.46,
      "learning_rate": 4.941534316505956e-05,
      "loss": 5.5905,
      "step": 26500
    },
    {
      "epoch": 7.6,
      "learning_rate": 4.9397617697107204e-05,
      "loss": 5.584,
      "step": 27000
    },
    {
      "epoch": 7.74,
      "learning_rate": 4.937989222915485e-05,
      "loss": 5.5788,
      "step": 27500
    },
    {
      "epoch": 7.89,
      "learning_rate": 4.9362237663074306e-05,
      "loss": 5.5768,
      "step": 28000
    },
    {
      "epoch": 8.03,
      "learning_rate": 4.9344512195121955e-05,
      "loss": 5.5757,
      "step": 28500
    },
    {
      "epoch": 8.17,
      "learning_rate": 4.93267867271696e-05,
      "loss": 5.57,
      "step": 29000
    },
    {
      "epoch": 8.31,
      "learning_rate": 4.930906125921724e-05,
      "loss": 5.5683,
      "step": 29500
    },
    {
      "epoch": 8.45,
      "learning_rate": 4.929133579126489e-05,
      "loss": 5.5653,
      "step": 30000
    },
    {
      "epoch": 8.59,
      "learning_rate": 4.927361032331253e-05,
      "loss": 5.5612,
      "step": 30500
    },
    {
      "epoch": 8.73,
      "learning_rate": 4.925588485536018e-05,
      "loss": 5.5606,
      "step": 31000
    },
    {
      "epoch": 8.87,
      "learning_rate": 4.9238159387407825e-05,
      "loss": 5.5517,
      "step": 31500
    },
    {
      "epoch": 9.01,
      "learning_rate": 4.922046937039138e-05,
      "loss": 5.5098,
      "step": 32000
    },
    {
      "epoch": 9.15,
      "learning_rate": 4.9202743902439026e-05,
      "loss": 5.2495,
      "step": 32500
    },
    {
      "epoch": 9.29,
      "learning_rate": 4.9185018434486675e-05,
      "loss": 5.0503,
      "step": 33000
    },
    {
      "epoch": 9.43,
      "learning_rate": 4.916729296653432e-05,
      "loss": 4.8823,
      "step": 33500
    },
    {
      "epoch": 9.57,
      "learning_rate": 4.914956749858197e-05,
      "loss": 4.7268,
      "step": 34000
    },
    {
      "epoch": 9.72,
      "learning_rate": 4.913187748156551e-05,
      "loss": 4.5778,
      "step": 34500
    },
    {
      "epoch": 9.86,
      "learning_rate": 4.911415201361316e-05,
      "loss": 4.4355,
      "step": 35000
    },
    {
      "epoch": 10.0,
      "learning_rate": 4.9096426545660804e-05,
      "loss": 4.2972,
      "step": 35500
    },
    {
      "epoch": 10.14,
      "learning_rate": 4.907873652864436e-05,
      "loss": 4.1631,
      "step": 36000
    },
    {
      "epoch": 10.28,
      "learning_rate": 4.9061011060692005e-05,
      "loss": 4.0378,
      "step": 36500
    },
    {
      "epoch": 10.42,
      "learning_rate": 4.9043285592739654e-05,
      "loss": 3.9145,
      "step": 37000
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.90255601247873e-05,
      "loss": 3.7891,
      "step": 37500
    },
    {
      "epoch": 10.7,
      "learning_rate": 4.9007834656834946e-05,
      "loss": 3.6041,
      "step": 38000
    },
    {
      "epoch": 10.84,
      "learning_rate": 4.899010918888259e-05,
      "loss": 3.2511,
      "step": 38500
    },
    {
      "epoch": 10.98,
      "learning_rate": 4.897238372093024e-05,
      "loss": 2.7629,
      "step": 39000
    },
    {
      "epoch": 11.12,
      "learning_rate": 4.895469370391378e-05,
      "loss": 2.5565,
      "step": 39500
    },
    {
      "epoch": 11.26,
      "learning_rate": 4.893696823596143e-05,
      "loss": 2.4138,
      "step": 40000
    },
    {
      "epoch": 11.41,
      "learning_rate": 4.8919242768009075e-05,
      "loss": 2.2866,
      "step": 40500
    },
    {
      "epoch": 11.55,
      "learning_rate": 4.8901517300056725e-05,
      "loss": 2.1777,
      "step": 41000
    },
    {
      "epoch": 11.69,
      "learning_rate": 4.8883827283040276e-05,
      "loss": 2.0846,
      "step": 41500
    },
    {
      "epoch": 11.83,
      "learning_rate": 4.8866101815087925e-05,
      "loss": 2.0126,
      "step": 42000
    },
    {
      "epoch": 11.97,
      "learning_rate": 4.884837634713557e-05,
      "loss": 1.9535,
      "step": 42500
    },
    {
      "epoch": 12.11,
      "learning_rate": 4.883065087918322e-05,
      "loss": 1.9016,
      "step": 43000
    },
    {
      "epoch": 12.25,
      "learning_rate": 4.881292541123086e-05,
      "loss": 1.8548,
      "step": 43500
    },
    {
      "epoch": 12.39,
      "learning_rate": 4.879523539421441e-05,
      "loss": 1.8197,
      "step": 44000
    },
    {
      "epoch": 12.53,
      "learning_rate": 4.8777509926262054e-05,
      "loss": 1.7838,
      "step": 44500
    },
    {
      "epoch": 12.67,
      "learning_rate": 4.8759784458309704e-05,
      "loss": 1.7528,
      "step": 45000
    },
    {
      "epoch": 12.81,
      "learning_rate": 4.8742058990357346e-05,
      "loss": 1.7196,
      "step": 45500
    },
    {
      "epoch": 12.95,
      "learning_rate": 4.872433352240499e-05,
      "loss": 1.6943,
      "step": 46000
    },
    {
      "epoch": 13.09,
      "learning_rate": 4.870664350538855e-05,
      "loss": 1.6715,
      "step": 46500
    },
    {
      "epoch": 13.24,
      "learning_rate": 4.868891803743619e-05,
      "loss": 1.6442,
      "step": 47000
    },
    {
      "epoch": 13.38,
      "learning_rate": 4.867119256948384e-05,
      "loss": 1.6238,
      "step": 47500
    },
    {
      "epoch": 13.52,
      "learning_rate": 4.865346710153148e-05,
      "loss": 1.6014,
      "step": 48000
    },
    {
      "epoch": 13.66,
      "learning_rate": 4.863574163357913e-05,
      "loss": 1.581,
      "step": 48500
    },
    {
      "epoch": 13.8,
      "learning_rate": 4.8618016165626774e-05,
      "loss": 1.5615,
      "step": 49000
    },
    {
      "epoch": 13.94,
      "learning_rate": 4.8600326148610325e-05,
      "loss": 1.5459,
      "step": 49500
    },
    {
      "epoch": 14.08,
      "learning_rate": 4.858260068065797e-05,
      "loss": 1.5299,
      "step": 50000
    },
    {
      "epoch": 14.22,
      "learning_rate": 4.856487521270562e-05,
      "loss": 1.5138,
      "step": 50500
    },
    {
      "epoch": 14.36,
      "learning_rate": 4.854714974475326e-05,
      "loss": 1.4984,
      "step": 51000
    },
    {
      "epoch": 14.5,
      "learning_rate": 4.852945972773681e-05,
      "loss": 1.4886,
      "step": 51500
    },
    {
      "epoch": 14.64,
      "learning_rate": 4.851173425978446e-05,
      "loss": 1.472,
      "step": 52000
    },
    {
      "epoch": 14.78,
      "learning_rate": 4.849400879183211e-05,
      "loss": 1.4619,
      "step": 52500
    },
    {
      "epoch": 14.93,
      "learning_rate": 4.847628332387975e-05,
      "loss": 1.4486,
      "step": 53000
    },
    {
      "epoch": 15.07,
      "learning_rate": 4.84585578559274e-05,
      "loss": 1.4368,
      "step": 53500
    },
    {
      "epoch": 15.21,
      "learning_rate": 4.844086783891095e-05,
      "loss": 1.4239,
      "step": 54000
    },
    {
      "epoch": 15.35,
      "learning_rate": 4.8423142370958596e-05,
      "loss": 1.4122,
      "step": 54500
    },
    {
      "epoch": 15.49,
      "learning_rate": 4.840541690300624e-05,
      "loss": 1.4043,
      "step": 55000
    },
    {
      "epoch": 15.63,
      "learning_rate": 4.838769143505389e-05,
      "loss": 1.3943,
      "step": 55500
    },
    {
      "epoch": 15.77,
      "learning_rate": 4.836996596710153e-05,
      "loss": 1.3843,
      "step": 56000
    },
    {
      "epoch": 15.91,
      "learning_rate": 4.835224049914918e-05,
      "loss": 1.3764,
      "step": 56500
    },
    {
      "epoch": 16.05,
      "learning_rate": 4.8334515031196824e-05,
      "loss": 1.3662,
      "step": 57000
    },
    {
      "epoch": 16.19,
      "learning_rate": 4.831678956324447e-05,
      "loss": 1.357,
      "step": 57500
    },
    {
      "epoch": 16.33,
      "learning_rate": 4.8299170448099834e-05,
      "loss": 1.349,
      "step": 58000
    },
    {
      "epoch": 16.47,
      "learning_rate": 4.828144498014748e-05,
      "loss": 1.3404,
      "step": 58500
    },
    {
      "epoch": 16.62,
      "learning_rate": 4.8263719512195126e-05,
      "loss": 1.3355,
      "step": 59000
    },
    {
      "epoch": 16.76,
      "learning_rate": 4.824599404424277e-05,
      "loss": 1.3258,
      "step": 59500
    },
    {
      "epoch": 16.9,
      "learning_rate": 4.822826857629041e-05,
      "loss": 1.3188,
      "step": 60000
    },
    {
      "epoch": 17.04,
      "learning_rate": 4.821054310833806e-05,
      "loss": 1.3101,
      "step": 60500
    },
    {
      "epoch": 17.18,
      "learning_rate": 4.8192817640385704e-05,
      "loss": 1.3034,
      "step": 61000
    },
    {
      "epoch": 17.32,
      "learning_rate": 4.8175092172433354e-05,
      "loss": 1.2974,
      "step": 61500
    },
    {
      "epoch": 17.46,
      "learning_rate": 4.8157366704480996e-05,
      "loss": 1.2897,
      "step": 62000
    },
    {
      "epoch": 17.6,
      "learning_rate": 4.8139676687464554e-05,
      "loss": 1.2832,
      "step": 62500
    },
    {
      "epoch": 17.74,
      "learning_rate": 4.81219512195122e-05,
      "loss": 1.2764,
      "step": 63000
    },
    {
      "epoch": 17.88,
      "learning_rate": 4.8104225751559846e-05,
      "loss": 1.2702,
      "step": 63500
    },
    {
      "epoch": 18.02,
      "learning_rate": 4.808650028360749e-05,
      "loss": 1.2651,
      "step": 64000
    },
    {
      "epoch": 18.16,
      "learning_rate": 4.806877481565514e-05,
      "loss": 1.2572,
      "step": 64500
    },
    {
      "epoch": 18.3,
      "learning_rate": 4.805108479863868e-05,
      "loss": 1.2547,
      "step": 65000
    },
    {
      "epoch": 18.45,
      "learning_rate": 4.803335933068633e-05,
      "loss": 1.2476,
      "step": 65500
    },
    {
      "epoch": 18.59,
      "learning_rate": 4.8015633862733975e-05,
      "loss": 1.243,
      "step": 66000
    },
    {
      "epoch": 18.73,
      "learning_rate": 4.7997908394781625e-05,
      "loss": 1.2364,
      "step": 66500
    },
    {
      "epoch": 18.87,
      "learning_rate": 4.798018292682927e-05,
      "loss": 1.2312,
      "step": 67000
    },
    {
      "epoch": 19.01,
      "learning_rate": 4.7962492909812825e-05,
      "loss": 1.2246,
      "step": 67500
    },
    {
      "epoch": 19.15,
      "learning_rate": 4.794476744186047e-05,
      "loss": 1.2205,
      "step": 68000
    },
    {
      "epoch": 19.29,
      "learning_rate": 4.792704197390812e-05,
      "loss": 1.2162,
      "step": 68500
    },
    {
      "epoch": 19.43,
      "learning_rate": 4.790931650595576e-05,
      "loss": 1.2122,
      "step": 69000
    },
    {
      "epoch": 19.57,
      "learning_rate": 4.789162648893931e-05,
      "loss": 1.2071,
      "step": 69500
    },
    {
      "epoch": 19.71,
      "learning_rate": 4.7873901020986954e-05,
      "loss": 1.2001,
      "step": 70000
    },
    {
      "epoch": 19.85,
      "learning_rate": 4.7856175553034604e-05,
      "loss": 1.1961,
      "step": 70500
    },
    {
      "epoch": 19.99,
      "learning_rate": 4.7838450085082246e-05,
      "loss": 1.191,
      "step": 71000
    },
    {
      "epoch": 20.14,
      "learning_rate": 4.78207600680658e-05,
      "loss": 1.1853,
      "step": 71500
    },
    {
      "epoch": 20.28,
      "learning_rate": 4.780303460011345e-05,
      "loss": 1.1842,
      "step": 72000
    },
    {
      "epoch": 20.42,
      "learning_rate": 4.7785309132161097e-05,
      "loss": 1.1771,
      "step": 72500
    },
    {
      "epoch": 20.56,
      "learning_rate": 4.776758366420874e-05,
      "loss": 1.1728,
      "step": 73000
    },
    {
      "epoch": 20.7,
      "learning_rate": 4.774985819625638e-05,
      "loss": 1.1709,
      "step": 73500
    },
    {
      "epoch": 20.84,
      "learning_rate": 4.773213272830403e-05,
      "loss": 1.1664,
      "step": 74000
    },
    {
      "epoch": 20.98,
      "learning_rate": 4.771444271128758e-05,
      "loss": 1.1615,
      "step": 74500
    },
    {
      "epoch": 21.12,
      "learning_rate": 4.7696717243335225e-05,
      "loss": 1.1567,
      "step": 75000
    },
    {
      "epoch": 21.26,
      "learning_rate": 4.767899177538287e-05,
      "loss": 1.1554,
      "step": 75500
    },
    {
      "epoch": 21.4,
      "learning_rate": 4.766126630743052e-05,
      "loss": 1.1511,
      "step": 76000
    },
    {
      "epoch": 21.54,
      "learning_rate": 4.764354083947816e-05,
      "loss": 1.146,
      "step": 76500
    },
    {
      "epoch": 21.68,
      "learning_rate": 4.762581537152581e-05,
      "loss": 1.1418,
      "step": 77000
    },
    {
      "epoch": 21.82,
      "learning_rate": 4.760808990357345e-05,
      "loss": 1.1388,
      "step": 77500
    },
    {
      "epoch": 21.97,
      "learning_rate": 4.75903644356211e-05,
      "loss": 1.1344,
      "step": 78000
    },
    {
      "epoch": 22.11,
      "learning_rate": 4.757267441860465e-05,
      "loss": 1.1315,
      "step": 78500
    },
    {
      "epoch": 22.25,
      "learning_rate": 4.75549489506523e-05,
      "loss": 1.1258,
      "step": 79000
    },
    {
      "epoch": 22.39,
      "learning_rate": 4.7537223482699945e-05,
      "loss": 1.1206,
      "step": 79500
    },
    {
      "epoch": 22.53,
      "learning_rate": 4.7519498014747595e-05,
      "loss": 1.1188,
      "step": 80000
    },
    {
      "epoch": 22.67,
      "learning_rate": 4.750184344866705e-05,
      "loss": 1.1194,
      "step": 80500
    },
    {
      "epoch": 22.81,
      "learning_rate": 4.748411798071469e-05,
      "loss": 1.1127,
      "step": 81000
    },
    {
      "epoch": 22.95,
      "learning_rate": 4.746639251276234e-05,
      "loss": 1.1116,
      "step": 81500
    },
    {
      "epoch": 23.09,
      "learning_rate": 4.744866704480998e-05,
      "loss": 1.1082,
      "step": 82000
    },
    {
      "epoch": 23.23,
      "learning_rate": 4.743094157685763e-05,
      "loss": 1.1043,
      "step": 82500
    },
    {
      "epoch": 23.37,
      "learning_rate": 4.741321610890528e-05,
      "loss": 1.1018,
      "step": 83000
    },
    {
      "epoch": 23.51,
      "learning_rate": 4.7395490640952924e-05,
      "loss": 1.0988,
      "step": 83500
    },
    {
      "epoch": 23.66,
      "learning_rate": 4.7377765173000574e-05,
      "loss": 1.0939,
      "step": 84000
    },
    {
      "epoch": 23.8,
      "learning_rate": 4.736007515598412e-05,
      "loss": 1.0936,
      "step": 84500
    },
    {
      "epoch": 23.94,
      "learning_rate": 4.734234968803177e-05,
      "loss": 1.09,
      "step": 85000
    },
    {
      "epoch": 24.08,
      "learning_rate": 4.732462422007941e-05,
      "loss": 1.0868,
      "step": 85500
    },
    {
      "epoch": 24.22,
      "learning_rate": 4.730689875212706e-05,
      "loss": 1.0855,
      "step": 86000
    },
    {
      "epoch": 24.36,
      "learning_rate": 4.72891732841747e-05,
      "loss": 1.0812,
      "step": 86500
    },
    {
      "epoch": 24.5,
      "learning_rate": 4.727144781622235e-05,
      "loss": 1.0798,
      "step": 87000
    },
    {
      "epoch": 24.64,
      "learning_rate": 4.7253722348269995e-05,
      "loss": 1.0772,
      "step": 87500
    },
    {
      "epoch": 24.78,
      "learning_rate": 4.7236032331253546e-05,
      "loss": 1.0763,
      "step": 88000
    },
    {
      "epoch": 24.92,
      "learning_rate": 4.7218306863301195e-05,
      "loss": 1.0732,
      "step": 88500
    },
    {
      "epoch": 25.06,
      "learning_rate": 4.7200581395348845e-05,
      "loss": 1.0699,
      "step": 89000
    },
    {
      "epoch": 25.2,
      "learning_rate": 4.718289137833239e-05,
      "loss": 1.0659,
      "step": 89500
    },
    {
      "epoch": 25.34,
      "learning_rate": 4.716516591038004e-05,
      "loss": 1.0668,
      "step": 90000
    },
    {
      "epoch": 25.49,
      "learning_rate": 4.714744044242768e-05,
      "loss": 1.0615,
      "step": 90500
    },
    {
      "epoch": 25.63,
      "learning_rate": 4.712971497447533e-05,
      "loss": 1.0594,
      "step": 91000
    },
    {
      "epoch": 25.77,
      "learning_rate": 4.7111989506522974e-05,
      "loss": 1.0612,
      "step": 91500
    },
    {
      "epoch": 25.91,
      "learning_rate": 4.7094299489506525e-05,
      "loss": 1.0545,
      "step": 92000
    },
    {
      "epoch": 26.05,
      "learning_rate": 4.707657402155417e-05,
      "loss": 1.0543,
      "step": 92500
    },
    {
      "epoch": 26.19,
      "learning_rate": 4.705884855360182e-05,
      "loss": 1.051,
      "step": 93000
    },
    {
      "epoch": 26.33,
      "learning_rate": 4.704112308564947e-05,
      "loss": 1.05,
      "step": 93500
    },
    {
      "epoch": 26.47,
      "learning_rate": 4.702339761769711e-05,
      "loss": 1.0478,
      "step": 94000
    },
    {
      "epoch": 26.61,
      "learning_rate": 4.700567214974476e-05,
      "loss": 1.0448,
      "step": 94500
    },
    {
      "epoch": 26.75,
      "learning_rate": 4.69879466817924e-05,
      "loss": 1.0433,
      "step": 95000
    },
    {
      "epoch": 26.89,
      "learning_rate": 4.697022121384005e-05,
      "loss": 1.0401,
      "step": 95500
    }
  ],
  "max_steps": 1420400,
  "num_train_epochs": 400,
  "total_flos": 2.583546946148054e+19,
  "trial_name": null,
  "trial_params": null
}