{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.8900739721314297,
  "global_step": 168000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 1.9942657262457713e-05, "loss": 3.9578, "step": 500 },
    { "epoch": 0.02, "learning_rate": 1.988531452491542e-05, "loss": 3.884, "step": 1000 },
    { "epoch": 0.03, "learning_rate": 1.982797178737313e-05, "loss": 3.8632, "step": 1500 },
    { "epoch": 0.03, "learning_rate": 1.977062904983084e-05, "loss": 3.8207, "step": 2000 },
    { "epoch": 0.04, "learning_rate": 1.971328631228855e-05, "loss": 3.8261, "step": 2500 },
    { "epoch": 0.05, "learning_rate": 1.9655943574746258e-05, "loss": 3.7841, "step": 3000 },
    { "epoch": 0.06, "learning_rate": 1.959860083720397e-05, "loss": 3.7908, "step": 3500 },
    { "epoch": 0.07, "learning_rate": 1.954125809966168e-05, "loss": 3.7871, "step": 4000 },
    { "epoch": 0.08, "learning_rate": 1.948391536211939e-05, "loss": 3.7905, "step": 4500 },
    { "epoch": 0.09, "learning_rate": 1.94265726245771e-05, "loss": 3.7622, "step": 5000 },
    { "epoch": 0.09, "learning_rate": 1.9369229887034807e-05, "loss": 3.7589, "step": 5500 },
    { "epoch": 0.1, "learning_rate": 1.9311887149492518e-05, "loss": 3.7362, "step": 6000 },
    { "epoch": 0.11, "learning_rate": 1.925454441195023e-05, "loss": 3.755, "step": 6500 },
    { "epoch": 0.12, "learning_rate": 1.9197201674407937e-05, "loss": 3.7524, "step": 7000 },
    { "epoch": 0.13, "learning_rate": 1.9139858936865648e-05, "loss": 3.7345, "step": 7500 },
    { "epoch": 0.14, "learning_rate": 1.908251619932336e-05, "loss": 3.7408, "step": 8000 },
    { "epoch": 0.15, "learning_rate": 1.9025173461781067e-05, "loss": 3.724, "step": 8500 },
    { "epoch": 0.15, "learning_rate": 1.8967830724238778e-05, "loss": 3.7352, "step": 9000 },
    { "epoch": 0.16, "learning_rate": 1.8910487986696485e-05, "loss": 3.702, "step": 9500 },
    { "epoch": 0.17, "learning_rate": 1.8853145249154197e-05, "loss": 3.7193, "step": 10000 },
    { "epoch": 0.18, "learning_rate": 1.8795802511611908e-05, "loss": 3.7118, "step": 10500 },
    { "epoch": 0.19, "learning_rate": 1.8738459774069615e-05, "loss": 3.7175, "step": 11000 },
    { "epoch": 0.2, "learning_rate": 1.8681117036527326e-05, "loss": 3.7082, "step": 11500 },
    { "epoch": 0.21, "learning_rate": 1.8623774298985034e-05, "loss": 3.7038, "step": 12000 },
    { "epoch": 0.22, "learning_rate": 1.8566431561442745e-05, "loss": 3.6797, "step": 12500 },
    { "epoch": 0.22, "learning_rate": 1.8509088823900453e-05, "loss": 3.6899, "step": 13000 },
    { "epoch": 0.23, "learning_rate": 1.8451746086358164e-05, "loss": 3.6892, "step": 13500 },
    { "epoch": 0.24, "learning_rate": 1.8394403348815875e-05, "loss": 3.7003, "step": 14000 },
    { "epoch": 0.25, "learning_rate": 1.8337060611273583e-05, "loss": 3.7041, "step": 14500 },
    { "epoch": 0.26, "learning_rate": 1.8279717873731294e-05, "loss": 3.6932, "step": 15000 },
    { "epoch": 0.27, "learning_rate": 1.8222375136189e-05, "loss": 3.6759, "step": 15500 },
    { "epoch": 0.28, "learning_rate": 1.8165032398646713e-05, "loss": 3.6823, "step": 16000 },
    { "epoch": 0.28, "learning_rate": 1.810768966110442e-05, "loss": 3.6839, "step": 16500 },
    { "epoch": 0.29, "learning_rate": 1.805034692356213e-05, "loss": 3.6515, "step": 17000 },
    { "epoch": 0.3, "learning_rate": 1.7993004186019843e-05, "loss": 3.69, "step": 17500 },
    { "epoch": 0.31, "learning_rate": 1.7935661448477554e-05, "loss": 3.6809, "step": 18000 },
    { "epoch": 0.32, "learning_rate": 1.787831871093526e-05, "loss": 3.6786, "step": 18500 },
    { "epoch": 0.33, "learning_rate": 1.7820975973392973e-05, "loss": 3.663, "step": 19000 },
    { "epoch": 0.34, "learning_rate": 1.776363323585068e-05, "loss": 3.6654, "step": 19500 },
    { "epoch": 0.34, "learning_rate": 1.770629049830839e-05, "loss": 3.695, "step": 20000 },
    { "epoch": 0.35, "learning_rate": 1.76489477607661e-05, "loss": 3.6576, "step": 20500 },
    { "epoch": 0.36, "learning_rate": 1.759160502322381e-05, "loss": 3.6561, "step": 21000 },
    { "epoch": 0.37, "learning_rate": 1.753426228568152e-05, "loss": 3.6745, "step": 21500 },
    { "epoch": 0.38, "learning_rate": 1.747691954813923e-05, "loss": 3.6581, "step": 22000 },
    { "epoch": 0.39, "learning_rate": 1.741957681059694e-05, "loss": 3.6571, "step": 22500 },
    { "epoch": 0.4, "learning_rate": 1.7362234073054648e-05, "loss": 3.6591, "step": 23000 },
    { "epoch": 0.4, "learning_rate": 1.730489133551236e-05, "loss": 3.6647, "step": 23500 },
    { "epoch": 0.41, "learning_rate": 1.7247548597970067e-05, "loss": 3.6542, "step": 24000 },
    { "epoch": 0.42, "learning_rate": 1.7190205860427778e-05, "loss": 3.6781, "step": 24500 },
    { "epoch": 0.43, "learning_rate": 1.713286312288549e-05, "loss": 3.6542, "step": 25000 },
    { "epoch": 0.44, "learning_rate": 1.70755203853432e-05, "loss": 3.6523, "step": 25500 },
    { "epoch": 0.45, "learning_rate": 1.7018177647800908e-05, "loss": 3.6477, "step": 26000 },
    { "epoch": 0.46, "learning_rate": 1.6960834910258615e-05, "loss": 3.6498, "step": 26500 },
    { "epoch": 0.46, "learning_rate": 1.6903492172716326e-05, "loss": 3.6227, "step": 27000 },
    { "epoch": 0.47, "learning_rate": 1.6846149435174038e-05, "loss": 3.6359, "step": 27500 },
    { "epoch": 0.48, "learning_rate": 1.6788806697631745e-05, "loss": 3.6491, "step": 28000 },
    { "epoch": 0.49, "learning_rate": 1.6731463960089456e-05, "loss": 3.6425, "step": 28500 },
    { "epoch": 0.5, "learning_rate": 1.6674121222547167e-05, "loss": 3.6357, "step": 29000 },
    { "epoch": 0.51, "learning_rate": 1.6616778485004875e-05, "loss": 3.6657, "step": 29500 },
    { "epoch": 0.52, "learning_rate": 1.6559435747462583e-05, "loss": 3.6432, "step": 30000 },
    { "epoch": 0.52, "learning_rate": 1.6502093009920294e-05, "loss": 3.6152, "step": 30500 },
    { "epoch": 0.53, "learning_rate": 1.6444750272378005e-05, "loss": 3.6392, "step": 31000 },
    { "epoch": 0.54, "learning_rate": 1.6387407534835716e-05, "loss": 3.6406, "step": 31500 },
    { "epoch": 0.55, "learning_rate": 1.6330064797293424e-05, "loss": 3.6509, "step": 32000 },
    { "epoch": 0.56, "learning_rate": 1.6272722059751135e-05, "loss": 3.6324, "step": 32500 },
    { "epoch": 0.57, "learning_rate": 1.6215379322208843e-05, "loss": 3.6346, "step": 33000 },
    { "epoch": 0.58, "learning_rate": 1.6158036584666554e-05, "loss": 3.6443, "step": 33500 },
    { "epoch": 0.58, "learning_rate": 1.610069384712426e-05, "loss": 3.6355, "step": 34000 },
    { "epoch": 0.59, "learning_rate": 1.6043351109581973e-05, "loss": 3.6321, "step": 34500 },
    { "epoch": 0.6, "learning_rate": 1.5986008372039684e-05, "loss": 3.6374, "step": 35000 },
    { "epoch": 0.61, "learning_rate": 1.592866563449739e-05, "loss": 3.6235, "step": 35500 },
    { "epoch": 0.62, "learning_rate": 1.5871322896955103e-05, "loss": 3.6365, "step": 36000 },
    { "epoch": 0.63, "learning_rate": 1.581398015941281e-05, "loss": 3.6262, "step": 36500 },
    { "epoch": 0.64, "learning_rate": 1.575663742187052e-05, "loss": 3.6121, "step": 37000 },
    { "epoch": 0.65, "learning_rate": 1.569929468432823e-05, "loss": 3.6266, "step": 37500 },
    { "epoch": 0.65, "learning_rate": 1.564195194678594e-05, "loss": 3.6206, "step": 38000 },
    { "epoch": 0.66, "learning_rate": 1.558460920924365e-05, "loss": 3.6531, "step": 38500 },
    { "epoch": 0.67, "learning_rate": 1.5527266471701362e-05, "loss": 3.6124, "step": 39000 },
    { "epoch": 0.68, "learning_rate": 1.546992373415907e-05, "loss": 3.6204, "step": 39500 },
    { "epoch": 0.69, "learning_rate": 1.5412580996616778e-05, "loss": 3.6393, "step": 40000 },
    { "epoch": 0.7, "learning_rate": 1.535523825907449e-05, "loss": 3.6177, "step": 40500 },
    { "epoch": 0.71, "learning_rate": 1.52978955215322e-05, "loss": 3.618, "step": 41000 },
    { "epoch": 0.71, "learning_rate": 1.5240552783989908e-05, "loss": 3.6169, "step": 41500 },
    { "epoch": 0.72, "learning_rate": 1.5183210046447619e-05, "loss": 3.6154, "step": 42000 },
    { "epoch": 0.73, "learning_rate": 1.5125867308905328e-05, "loss": 3.6428, "step": 42500 },
    { "epoch": 0.74, "learning_rate": 1.506852457136304e-05, "loss": 3.6056, "step": 43000 },
    { "epoch": 0.75, "learning_rate": 1.5011181833820747e-05, "loss": 3.6168, "step": 43500 },
    { "epoch": 0.76, "learning_rate": 1.4953839096278458e-05, "loss": 3.6228, "step": 44000 },
    { "epoch": 0.77, "learning_rate": 1.4896496358736167e-05, "loss": 3.618, "step": 44500 },
    { "epoch": 0.77, "learning_rate": 1.4839153621193879e-05, "loss": 3.6016, "step": 45000 },
    { "epoch": 0.78, "learning_rate": 1.4781810883651586e-05, "loss": 3.6104, "step": 45500 },
    { "epoch": 0.79, "learning_rate": 1.4724468146109296e-05, "loss": 3.6012, "step": 46000 },
    { "epoch": 0.8, "learning_rate": 1.4667125408567007e-05, "loss": 3.6265, "step": 46500 },
    { "epoch": 0.81, "learning_rate": 1.4609782671024715e-05, "loss": 3.617, "step": 47000 },
    { "epoch": 0.82, "learning_rate": 1.4552439933482426e-05, "loss": 3.5995, "step": 47500 },
    { "epoch": 0.83, "learning_rate": 1.4495097195940135e-05, "loss": 3.5959, "step": 48000 },
    { "epoch": 0.83, "learning_rate": 1.4437754458397846e-05, "loss": 3.596, "step": 48500 },
    { "epoch": 0.84, "learning_rate": 1.4380411720855554e-05, "loss": 3.6095, "step": 49000 },
    { "epoch": 0.85, "learning_rate": 1.4323068983313265e-05, "loss": 3.6183, "step": 49500 },
    { "epoch": 0.86, "learning_rate": 1.4265726245770974e-05, "loss": 3.5972, "step": 50000 },
    { "epoch": 0.87, "learning_rate": 1.4208383508228685e-05, "loss": 3.6019, "step": 50500 },
    { "epoch": 0.88, "learning_rate": 1.4151040770686393e-05, "loss": 3.5863, "step": 51000 },
    { "epoch": 0.89, "learning_rate": 1.4093698033144103e-05, "loss": 3.5922, "step": 51500 },
    { "epoch": 0.89, "learning_rate": 1.4036355295601814e-05, "loss": 3.5915, "step": 52000 },
    { "epoch": 0.9, "learning_rate": 1.3979012558059523e-05, "loss": 3.6097, "step": 52500 },
    { "epoch": 0.91, "learning_rate": 1.3921669820517232e-05, "loss": 3.5922, "step": 53000 },
    { "epoch": 0.92, "learning_rate": 1.3864327082974942e-05, "loss": 3.612, "step": 53500 },
    { "epoch": 0.93, "learning_rate": 1.3806984345432653e-05, "loss": 3.6296, "step": 54000 },
    { "epoch": 0.94, "learning_rate": 1.3749641607890362e-05, "loss": 3.6123, "step": 54500 },
    { "epoch": 0.95, "learning_rate": 1.369229887034807e-05, "loss": 3.5877, "step": 55000 },
    { "epoch": 0.95, "learning_rate": 1.3634956132805781e-05, "loss": 3.5991, "step": 55500 },
    { "epoch": 0.96, "learning_rate": 1.357761339526349e-05, "loss": 3.6002, "step": 56000 },
    { "epoch": 0.97, "learning_rate": 1.3520270657721202e-05, "loss": 3.5908, "step": 56500 },
    { "epoch": 0.98, "learning_rate": 1.346292792017891e-05, "loss": 3.5934, "step": 57000 },
    { "epoch": 0.99, "learning_rate": 1.340558518263662e-05, "loss": 3.5582, "step": 57500 },
    { "epoch": 1.0, "learning_rate": 1.334824244509433e-05, "loss": 3.5878, "step": 58000 },
    { "epoch": 1.0, "eval_loss": 3.488621711730957, "eval_runtime": 195.5381, "eval_samples_per_second": 264.255, "eval_steps_per_second": 33.032, "step": 58130 },
    { "epoch": 1.01, "learning_rate": 1.3290899707552041e-05, "loss": 3.5763, "step": 58500 },
    { "epoch": 1.01, "learning_rate": 1.3233556970009749e-05, "loss": 3.5624, "step": 59000 },
    { "epoch": 1.02, "learning_rate": 1.317621423246746e-05, "loss": 3.5707, "step": 59500 },
    { "epoch": 1.03, "learning_rate": 1.311887149492517e-05, "loss": 3.5385, "step": 60000 },
    { "epoch": 1.04, "learning_rate": 1.3061528757382877e-05, "loss": 3.531, "step": 60500 },
    { "epoch": 1.05, "learning_rate": 1.3004186019840588e-05, "loss": 3.5365, "step": 61000 },
    { "epoch": 1.06, "learning_rate": 1.2946843282298297e-05, "loss": 3.5483, "step": 61500 },
    { "epoch": 1.07, "learning_rate": 1.2889500544756009e-05, "loss": 3.5502, "step": 62000 },
    { "epoch": 1.08, "learning_rate": 1.2832157807213716e-05, "loss": 3.5452, "step": 62500 },
    { "epoch": 1.08, "learning_rate": 1.2774815069671427e-05, "loss": 3.5772, "step": 63000 },
    { "epoch": 1.09, "learning_rate": 1.2717472332129137e-05, "loss": 3.5712, "step": 63500 },
    { "epoch": 1.1, "learning_rate": 1.2660129594586848e-05, "loss": 3.5656, "step": 64000 },
    { "epoch": 1.11, "learning_rate": 1.2602786857044556e-05, "loss": 3.5558, "step": 64500 },
    { "epoch": 1.12, "learning_rate": 1.2545444119502267e-05, "loss": 3.5465, "step": 65000 },
    { "epoch": 1.13, "learning_rate": 1.2488101381959976e-05, "loss": 3.5414, "step": 65500 },
    { "epoch": 1.14, "learning_rate": 1.2430758644417687e-05, "loss": 3.553, "step": 66000 },
    { "epoch": 1.14, "learning_rate": 1.2373415906875395e-05, "loss": 3.5542, "step": 66500 },
    { "epoch": 1.15, "learning_rate": 1.2316073169333104e-05, "loss": 3.5641, "step": 67000 },
    { "epoch": 1.16, "learning_rate": 1.2258730431790815e-05, "loss": 3.5515, "step": 67500 },
    { "epoch": 1.17, "learning_rate": 1.2201387694248525e-05, "loss": 3.5564, "step": 68000 },
    { "epoch": 1.18, "learning_rate": 1.2144044956706234e-05, "loss": 3.5536, "step": 68500 },
    { "epoch": 1.19, "learning_rate": 1.2086702219163944e-05, "loss": 3.5542, "step": 69000 },
    { "epoch": 1.2, "learning_rate": 1.2029359481621655e-05, "loss": 3.5631, "step": 69500 },
    { "epoch": 1.2, "learning_rate": 1.1972016744079364e-05, "loss": 3.5497, "step": 70000 },
    { "epoch": 1.21, "learning_rate": 1.1914674006537072e-05, "loss": 3.5603, "step": 70500 },
    { "epoch": 1.22, "learning_rate": 1.1857331268994783e-05, "loss": 3.5516, "step": 71000 },
    { "epoch": 1.23, "learning_rate": 1.1799988531452492e-05, "loss": 3.569, "step": 71500 },
    { "epoch": 1.24, "learning_rate": 1.1742645793910202e-05, "loss": 3.5685, "step": 72000 },
    { "epoch": 1.25, "learning_rate": 1.1685303056367911e-05, "loss": 3.5591, "step": 72500 },
    { "epoch": 1.26, "learning_rate": 1.1627960318825622e-05, "loss": 3.5344, "step": 73000 },
    { "epoch": 1.26, "learning_rate": 1.1570617581283332e-05, "loss": 3.5444, "step": 73500 },
    { "epoch": 1.27, "learning_rate": 1.1513274843741041e-05, "loss": 3.5395, "step": 74000 },
    { "epoch": 1.28, "learning_rate": 1.145593210619875e-05, "loss": 3.5432, "step": 74500 },
    { "epoch": 1.29, "learning_rate": 1.1398589368656462e-05, "loss": 3.5527, "step": 75000 },
    { "epoch": 1.3, "learning_rate": 1.1341246631114171e-05, "loss": 3.5768, "step": 75500 },
    { "epoch": 1.31, "learning_rate": 1.1283903893571879e-05, "loss": 3.569, "step": 76000 },
    { "epoch": 1.32, "learning_rate": 1.122656115602959e-05, "loss": 3.5524, "step": 76500 },
    { "epoch": 1.32, "learning_rate": 1.1169218418487299e-05, "loss": 3.5415, "step": 77000 },
    { "epoch": 1.33, "learning_rate": 1.111187568094501e-05, "loss": 3.5467, "step": 77500 },
    { "epoch": 1.34, "learning_rate": 1.1054532943402718e-05, "loss": 3.534, "step": 78000 },
    { "epoch": 1.35, "learning_rate": 1.0997190205860429e-05, "loss": 3.5524, "step": 78500 },
    { "epoch": 1.36, "learning_rate": 1.0939847468318138e-05, "loss": 3.563, "step": 79000 },
    { "epoch": 1.37, "learning_rate": 1.088250473077585e-05, "loss": 3.5339, "step": 79500 },
    { "epoch": 1.38, "learning_rate": 1.0825161993233557e-05, "loss": 3.5339, "step": 80000 },
    { "epoch": 1.38, "learning_rate": 1.0767819255691267e-05, "loss": 3.5453, "step": 80500 },
    { "epoch": 1.39, "learning_rate": 1.0710476518148978e-05, "loss": 3.5352, "step": 81000 },
    { "epoch": 1.4, "learning_rate": 1.0653133780606689e-05, "loss": 3.5337, "step": 81500 },
    { "epoch": 1.41, "learning_rate": 1.0595791043064397e-05, "loss": 3.5497, "step": 82000 },
    { "epoch": 1.42, "learning_rate": 1.0538448305522106e-05, "loss": 3.5081, "step": 82500 },
    { "epoch": 1.43, "learning_rate": 1.0481105567979817e-05, "loss": 3.5442, "step": 83000 },
    { "epoch": 1.44, "learning_rate": 1.0423762830437525e-05, "loss": 3.5356, "step": 83500 },
    { "epoch": 1.45, "learning_rate": 1.0366420092895236e-05, "loss": 3.5521, "step": 84000 },
    { "epoch": 1.45, "learning_rate": 1.0309077355352945e-05, "loss": 3.5256, "step": 84500 },
    { "epoch": 1.46, "learning_rate": 1.0251734617810656e-05, "loss": 3.5487, "step": 85000 },
    { "epoch": 1.47, "learning_rate": 1.0194391880268364e-05, "loss": 3.5433, "step": 85500 },
    { "epoch": 1.48, "learning_rate": 1.0137049142726074e-05, "loss": 3.5477, "step": 86000 },
    { "epoch": 1.49, "learning_rate": 1.0079706405183785e-05, "loss": 3.5315, "step": 86500 },
    { "epoch": 1.5, "learning_rate": 1.0022363667641494e-05, "loss": 3.5268, "step": 87000 },
    { "epoch": 1.51, "learning_rate": 9.965020930099203e-06, "loss": 3.5473, "step": 87500 },
    { "epoch": 1.51, "learning_rate": 9.907678192556913e-06, "loss": 3.5447, "step": 88000 },
    { "epoch": 1.52, "learning_rate": 9.850335455014624e-06, "loss": 3.5397, "step": 88500 },
    { "epoch": 1.53, "learning_rate": 9.792992717472333e-06, "loss": 3.5157, "step": 89000 },
    { "epoch": 1.54, "learning_rate": 9.735649979930043e-06, "loss": 3.5399, "step": 89500 },
    { "epoch": 1.55, "learning_rate": 9.678307242387752e-06, "loss": 3.5491, "step": 90000 },
    { "epoch": 1.56, "learning_rate": 9.620964504845463e-06, "loss": 3.5402, "step": 90500 },
    { "epoch": 1.57, "learning_rate": 9.563621767303171e-06, "loss": 3.5474, "step": 91000 },
    { "epoch": 1.57, "learning_rate": 9.506279029760882e-06, "loss": 3.531, "step": 91500 },
    { "epoch": 1.58, "learning_rate": 9.448936292218591e-06, "loss": 3.523, "step": 92000 },
    { "epoch": 1.59, "learning_rate": 9.391593554676301e-06, "loss": 3.5462, "step": 92500 },
    { "epoch": 1.6, "learning_rate": 9.33425081713401e-06, "loss": 3.5526, "step": 93000 },
    { "epoch": 1.61, "learning_rate": 9.276908079591721e-06, "loss": 3.5399, "step": 93500 },
    { "epoch": 1.62, "learning_rate": 9.21956534204943e-06, "loss": 3.5373, "step": 94000 },
    { "epoch": 1.63, "learning_rate": 9.16222260450714e-06, "loss": 3.5321, "step": 94500 },
    { "epoch": 1.63, "learning_rate": 9.10487986696485e-06, "loss": 3.5396, "step": 95000 },
    { "epoch": 1.64, "learning_rate": 9.047537129422559e-06, "loss": 3.535, "step": 95500 },
    { "epoch": 1.65, "learning_rate": 8.990194391880268e-06, "loss": 3.5285, "step": 96000 },
    { "epoch": 1.66, "learning_rate": 8.932851654337978e-06, "loss": 3.5281, "step": 96500 },
    { "epoch": 1.67, "learning_rate": 8.875508916795689e-06, "loss": 3.5547, "step": 97000 },
    { "epoch": 1.68, "learning_rate": 8.818166179253398e-06, "loss": 3.5361, "step": 97500 },
    { "epoch": 1.69, "learning_rate": 8.760823441711108e-06, "loss": 3.538, "step": 98000 },
    { "epoch": 1.69, "learning_rate": 8.703480704168817e-06, "loss": 3.5353, "step": 98500 },
    { "epoch": 1.7, "learning_rate": 8.646137966626528e-06, "loss": 3.5402, "step": 99000 },
    { "epoch": 1.71, "learning_rate": 8.588795229084238e-06, "loss": 3.5299, "step": 99500 },
    { "epoch": 1.72, "learning_rate": 8.531452491541947e-06, "loss": 3.5395, "step": 100000 },
    { "epoch": 1.73, "learning_rate": 8.474109753999656e-06, "loss": 3.5372, "step": 100500 },
    { "epoch": 1.74, "learning_rate": 8.416767016457368e-06, "loss": 3.5259, "step": 101000 },
    { "epoch": 1.75, "learning_rate": 8.359424278915075e-06, "loss": 3.5368, "step": 101500 },
    { "epoch": 1.75, "learning_rate": 8.302081541372786e-06, "loss": 3.5278, "step": 102000 },
    { "epoch": 1.76, "learning_rate": 8.244738803830496e-06, "loss": 3.5277, "step": 102500 },
    { "epoch": 1.77, "learning_rate": 8.187396066288205e-06, "loss": 3.5382, "step": 103000 },
    { "epoch": 1.78, "learning_rate": 8.130053328745915e-06, "loss": 3.5276, "step": 103500 },
    { "epoch": 1.79, "learning_rate": 8.072710591203626e-06, "loss": 3.5348, "step": 104000 },
    { "epoch": 1.8, "learning_rate": 8.015367853661335e-06, "loss": 3.5368, "step": 104500 },
    { "epoch": 1.81, "learning_rate": 7.958025116119044e-06, "loss": 3.5339, "step": 105000 },
    { "epoch": 1.81, "learning_rate": 7.900682378576754e-06, "loss": 3.5285, "step": 105500 },
    { "epoch": 1.82, "learning_rate": 7.843339641034463e-06, "loss": 3.5172, "step": 106000 },
    { "epoch": 1.83, "learning_rate": 7.785996903492173e-06, "loss": 3.5349, "step": 106500 },
    { "epoch": 1.84, "learning_rate": 7.728654165949882e-06, "loss": 3.5305, "step": 107000 },
    { "epoch": 1.85, "learning_rate": 7.671311428407593e-06, "loss": 3.533, "step": 107500 },
    { "epoch": 1.86, "learning_rate": 7.613968690865302e-06, "loss": 3.5158, "step": 108000 },
    { "epoch": 1.87, "learning_rate": 7.556625953323012e-06, "loss": 3.5257, "step": 108500 },
    { "epoch": 1.88, "learning_rate": 7.499283215780721e-06, "loss": 3.5343, "step": 109000 },
    { "epoch": 1.88, "learning_rate": 7.441940478238432e-06, "loss": 3.5337, "step": 109500 },
    { "epoch": 1.89, "learning_rate": 7.384597740696141e-06, "loss": 3.5388, "step": 110000 },
    { "epoch": 1.9, "learning_rate": 7.327255003153851e-06, "loss": 3.5295, "step": 110500 },
    { "epoch": 1.91, "learning_rate": 7.269912265611561e-06, "loss": 3.5367, "step": 111000 },
    { "epoch": 1.92, "learning_rate": 7.212569528069271e-06, "loss": 3.5139, "step": 111500 },
    { "epoch": 1.93, "learning_rate": 7.15522679052698e-06, "loss": 3.5249, "step": 112000 },
    { "epoch": 1.94, "learning_rate": 7.097884052984691e-06, "loss": 3.5232, "step": 112500 },
    { "epoch": 1.94, "learning_rate": 7.040541315442399e-06, "loss": 3.5366, "step": 113000 },
    { "epoch": 1.95, "learning_rate": 6.98319857790011e-06, "loss": 3.5347, "step": 113500 },
    { "epoch": 1.96, "learning_rate": 6.925855840357819e-06, "loss": 3.5335, "step": 114000 },
    { "epoch": 1.97, "learning_rate": 6.868513102815529e-06, "loss": 3.5481, "step": 114500 },
    { "epoch": 1.98, "learning_rate": 6.8111703652732385e-06, "loss": 3.5375, "step": 115000 },
    { "epoch": 1.99, "learning_rate": 6.753827627730949e-06, "loss": 3.5224, "step": 115500 },
    { "epoch": 2.0, "learning_rate": 6.696484890188658e-06, "loss": 3.5047, "step": 116000 },
    { "epoch": 2.0, "eval_loss": 3.4477686882019043, "eval_runtime": 195.5804, "eval_samples_per_second": 264.198, "eval_steps_per_second": 33.025, "step": 116260 },
    { "epoch": 2.0, "learning_rate": 6.639142152646368e-06, "loss": 3.5131, "step": 116500 },
    { "epoch": 2.01, "learning_rate": 6.581799415104078e-06, "loss": 3.4867, "step": 117000 },
    { "epoch": 2.02, "learning_rate": 6.524456677561786e-06, "loss": 3.4832, "step": 117500 },
    { "epoch": 2.03, "learning_rate": 6.4671139400194975e-06, "loss": 3.4934, "step": 118000 },
    { "epoch": 2.04, "learning_rate": 6.409771202477206e-06, "loss": 3.5148, "step": 118500 },
    { "epoch": 2.05, "learning_rate": 6.352428464934916e-06, "loss": 3.5127, "step": 119000 },
    { "epoch": 2.06, "learning_rate": 6.295085727392626e-06, "loss": 3.4871, "step": 119500 },
    { "epoch": 2.06, "learning_rate": 6.237742989850336e-06, "loss": 3.4912, "step": 120000 },
    { "epoch": 2.07, "learning_rate": 6.180400252308045e-06, "loss": 3.4929, "step": 120500 },
    { "epoch": 2.08, "learning_rate": 6.123057514765756e-06, "loss": 3.5093, "step": 121000 },
    { "epoch": 2.09, "learning_rate": 6.065714777223465e-06, "loss": 3.5089, "step": 121500 },
    { "epoch": 2.1, "learning_rate": 6.008372039681175e-06, "loss": 3.5037, "step": 122000 },
    { "epoch": 2.11, "learning_rate": 5.951029302138885e-06, "loss": 3.4928, "step": 122500 },
    { "epoch": 2.12, "learning_rate": 5.893686564596595e-06, "loss": 3.4972, "step": 123000 },
    { "epoch": 2.12, "learning_rate": 5.8363438270543035e-06, "loss": 3.4876, "step": 123500 },
    { "epoch": 2.13, "learning_rate": 5.779001089512014e-06, "loss": 3.5086, "step": 124000 },
    { "epoch": 2.14, "learning_rate": 5.721658351969723e-06, "loss": 3.4875, "step": 124500 },
    { "epoch": 2.15, "learning_rate": 5.664315614427433e-06, "loss": 3.5013, "step": 125000 },
    { "epoch": 2.16, "learning_rate": 5.606972876885143e-06, "loss": 3.5155, "step": 125500 },
    { "epoch": 2.17, "learning_rate": 5.549630139342853e-06, "loss": 3.4845, "step": 126000 },
    { "epoch": 2.18, "learning_rate": 5.4922874018005624e-06, "loss": 3.4891, "step": 126500 },
    { "epoch": 2.18, "learning_rate": 5.434944664258273e-06, "loss": 3.4938, "step": 127000 },
    { "epoch": 2.19, "learning_rate": 5.377601926715982e-06, "loss": 3.4838, "step": 127500 },
    { "epoch": 2.2, "learning_rate": 5.320259189173692e-06, "loss": 3.4834, "step": 128000 },
    { "epoch": 2.21, "learning_rate": 5.262916451631401e-06, "loss": 3.5056, "step": 128500 },
    { "epoch": 2.22, "learning_rate": 5.20557371408911e-06, "loss": 3.4985, "step": 129000 },
    { "epoch": 2.23, "learning_rate": 5.1482309765468206e-06, "loss": 3.4995, "step": 129500 },
    { "epoch": 2.24, "learning_rate": 5.09088823900453e-06, "loss": 3.4868, "step": 130000 },
    { "epoch": 2.24, "learning_rate": 5.03354550146224e-06, "loss": 3.4902, "step": 130500 },
    { "epoch": 2.25, "learning_rate": 4.97620276391995e-06, "loss": 3.4682, "step": 131000 },
    { "epoch": 2.26, "learning_rate": 4.91886002637766e-06, "loss": 3.4983, "step": 131500 },
    { "epoch": 2.27, "learning_rate": 4.861517288835369e-06, "loss": 3.5203, "step": 132000 },
    { "epoch": 2.28, "learning_rate": 4.8041745512930795e-06, "loss": 3.4964, "step": 132500 },
    { "epoch": 2.29, "learning_rate": 4.746831813750789e-06, "loss": 3.5054, "step": 133000 },
    { "epoch": 2.3, "learning_rate": 4.689489076208499e-06, "loss": 3.5083, "step": 133500 },
    { "epoch": 2.31, "learning_rate": 4.632146338666209e-06, "loss": 3.5051, "step": 134000 },
    { "epoch": 2.31, "learning_rate": 4.574803601123918e-06, "loss": 3.4892, "step": 134500 },
    { "epoch": 2.32, "learning_rate": 4.517460863581627e-06, "loss": 3.4774, "step": 135000 },
    { "epoch": 2.33, "learning_rate": 4.460118126039338e-06, "loss": 3.4805, "step": 135500 },
    { "epoch": 2.34, "learning_rate": 4.402775388497047e-06, "loss": 3.511, "step": 136000 },
    { "epoch": 2.35, "learning_rate": 4.3454326509547565e-06, "loss": 3.4856, "step": 136500 },
    { "epoch": 2.36, "learning_rate": 4.288089913412467e-06, "loss": 3.5008, "step": 137000 },
    { "epoch": 2.37, "learning_rate": 4.230747175870176e-06, "loss": 3.4837, "step": 137500 },
    { "epoch": 2.37, "learning_rate": 4.173404438327886e-06, "loss": 3.504, "step": 138000 },
    { "epoch": 2.38, "learning_rate": 4.116061700785596e-06, "loss": 3.4886, "step": 138500 },
    { "epoch": 2.39, "learning_rate": 4.058718963243305e-06, "loss": 3.5019, "step": 139000 },
    { "epoch": 2.4, "learning_rate": 4.0013762257010155e-06, "loss": 3.5087, "step": 139500 },
    { "epoch": 2.41, "learning_rate": 3.944033488158725e-06, "loss": 3.4874, "step": 140000 },
    { "epoch": 2.42, "learning_rate": 3.886690750616435e-06, "loss": 3.4921, "step": 140500 },
    { "epoch": 2.43, "learning_rate": 3.8293480130741445e-06, "loss": 3.4895, "step": 141000 },
    { "epoch": 2.43, "learning_rate": 3.7720052755318543e-06, "loss": 3.4737, "step": 141500 },
    { "epoch": 2.44, "learning_rate": 3.714662537989564e-06, "loss": 3.5, "step": 142000 },
    { "epoch": 2.45, "learning_rate": 3.6573198004472736e-06, "loss": 3.5245, "step": 142500 },
    { "epoch": 2.46, "learning_rate": 3.5999770629049834e-06, "loss": 3.4985, "step": 143000 },
    { "epoch": 2.47, "learning_rate": 3.5426343253626932e-06, "loss": 3.4999, "step": 143500 },
    { "epoch": 2.48, "learning_rate": 3.485291587820403e-06, "loss": 3.498, "step": 144000 },
    { "epoch": 2.49, "learning_rate": 3.427948850278113e-06, "loss": 3.4886, "step": 144500 },
    { "epoch": 2.49, "learning_rate": 3.3706061127358223e-06, "loss": 3.5043, "step": 145000 },
    { "epoch": 2.5, "learning_rate": 3.313263375193532e-06, "loss": 3.5119, "step": 145500 },
    { "epoch": 2.51, "learning_rate": 3.255920637651242e-06, "loss": 3.4906, "step": 146000 },
    { "epoch": 2.52, "learning_rate": 3.1985779001089514e-06, "loss": 3.4851, "step": 146500 },
    { "epoch": 2.53, "learning_rate": 3.1412351625666608e-06, "loss": 3.5008, "step": 147000 },
    { "epoch": 2.54, "learning_rate": 3.0838924250243706e-06, "loss": 3.4877, "step": 147500 },
    { "epoch": 2.55, "learning_rate": 3.0265496874820804e-06, "loss": 3.4755, "step": 148000 },
    { "epoch": 2.55, "learning_rate": 2.9692069499397902e-06, "loss": 3.517, "step": 148500 },
    { "epoch": 2.56, "learning_rate": 2.9118642123975e-06, "loss": 3.4989, "step": 149000 },
    { "epoch": 2.57, "learning_rate": 2.85452147485521e-06, "loss": 3.5124, "step": 149500 },
    { "epoch": 2.58, "learning_rate": 2.7971787373129193e-06, "loss": 3.4972, "step": 150000 },
    { "epoch": 2.59, "learning_rate": 2.739835999770629e-06, "loss": 3.4792, "step": 150500 },
    { "epoch": 2.6, "learning_rate": 2.682493262228339e-06, "loss": 3.4981, "step": 151000 },
    { "epoch": 2.61, "learning_rate": 2.625150524686049e-06, "loss": 3.5123, "step": 151500 },
    { "epoch": 2.61, "learning_rate": 2.5678077871437586e-06, "loss": 3.4919, "step": 152000 },
    { "epoch": 2.62, "learning_rate": 2.510465049601468e-06, "loss": 3.5071, "step": 152500 },
    { "epoch": 2.63, "learning_rate": 2.453122312059178e-06, "loss": 3.4847, "step": 153000 },
    { "epoch": 2.64, "learning_rate": 2.3957795745168877e-06, "loss": 3.4958, "step": 153500 },
    { "epoch": 2.65, "learning_rate": 2.3384368369745975e-06, "loss": 3.481, "step": 154000 },
    { "epoch": 2.66, "learning_rate": 2.2810940994323073e-06, "loss": 3.487, "step": 154500 },
    { "epoch": 2.67, "learning_rate": 2.2237513618900168e-06, "loss": 3.5001, "step": 155000 },
    { "epoch": 2.68, "learning_rate": 2.1664086243477266e-06, "loss": 3.5076, "step": 155500 },
    { "epoch": 2.68, "learning_rate": 2.109065886805436e-06, "loss": 3.4981, "step": 156000 },
    { "epoch": 2.69, "learning_rate": 2.051723149263146e-06, "loss": 3.5101, "step": 156500 },
    { "epoch": 2.7, "learning_rate": 1.9943804117208556e-06, "loss": 3.489, "step": 157000 },
    { "epoch": 2.71, "learning_rate": 1.9370376741785655e-06, "loss": 3.4688, "step": 157500 },
    { "epoch": 2.72, "learning_rate": 1.879694936636275e-06, "loss": 3.5021, "step": 158000 },
    { "epoch": 2.73, "learning_rate": 1.822352199093985e-06, "loss": 3.5025, "step": 158500 },
    { "epoch": 2.74, "learning_rate": 1.7650094615516947e-06, "loss": 3.4762, "step": 159000 },
    { "epoch": 2.74, "learning_rate": 1.7076667240094044e-06, "loss": 3.4872, "step": 159500 },
    { "epoch": 2.75, "learning_rate": 1.6503239864671142e-06, "loss": 3.5097, "step": 160000 },
    { "epoch": 2.76, "learning_rate": 1.592981248924824e-06, "loss": 3.4857, "step": 160500 },
    { "epoch": 2.77, "learning_rate": 1.5356385113825334e-06, "loss": 3.5111, "step": 161000 },
    { "epoch": 2.78, "learning_rate": 1.478295773840243e-06, "loss": 3.4998, "step": 161500 },
    { "epoch": 2.79, "learning_rate": 1.4209530362979529e-06, "loss": 3.5165, "step": 162000 },
    { "epoch": 2.8, "learning_rate": 1.3636102987556627e-06, "loss": 3.496, "step": 162500 },
    { "epoch": 2.8, "learning_rate": 1.3062675612133723e-06, "loss": 3.5073, "step": 163000 },
    { "epoch": 2.81, "learning_rate": 1.2489248236710821e-06, "loss": 3.5084, "step": 163500 },
    { "epoch": 2.82, "learning_rate": 1.191582086128792e-06, "loss": 3.4961, "step": 164000 },
    { "epoch": 2.83, "learning_rate": 1.1342393485865016e-06, "loss": 3.4989, "step": 164500 },
    { "epoch": 2.84, "learning_rate": 1.0768966110442114e-06, "loss": 3.5057, "step": 165000 },
    { "epoch": 2.85, "learning_rate": 1.019553873501921e-06, "loss": 3.5051, "step": 165500 },
    { "epoch": 2.86, "learning_rate": 9.622111359596307e-07, "loss": 3.5005, "step": 166000 },
    { "epoch": 2.86, "learning_rate": 9.048683984173405e-07, "loss": 3.4941, "step": 166500 },
    { "epoch": 2.87, "learning_rate": 8.475256608750502e-07, "loss": 3.4784, "step": 167000 },
    { "epoch": 2.88, "learning_rate": 7.9018292333276e-07, "loss": 3.4829, "step": 167500 },
    { "epoch": 2.89, "learning_rate": 7.328401857904698e-07, "loss": 3.4855, "step": 168000 }
  ],
  "max_steps": 174390,
  "num_train_epochs": 3,
  "total_flos": 4.3897854099456e+16,
  "trial_name": null,
  "trial_params": null
}