{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 180294, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.986133759304248e-05, |
|
"loss": 2.7631, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9722675186084955e-05, |
|
"loss": 1.9875, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9584012779127425e-05, |
|
"loss": 1.7721, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.94453503721699e-05, |
|
"loss": 1.6142, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.930668796521238e-05, |
|
"loss": 1.5159, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9168025558254854e-05, |
|
"loss": 1.4552, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.902936315129733e-05, |
|
"loss": 1.4205, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.88907007443398e-05, |
|
"loss": 1.355, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.875203833738228e-05, |
|
"loss": 1.3154, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.861337593042475e-05, |
|
"loss": 1.2864, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.847471352346723e-05, |
|
"loss": 1.2668, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.83360511165097e-05, |
|
"loss": 1.2525, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8197388709552176e-05, |
|
"loss": 1.2474, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.805872630259465e-05, |
|
"loss": 1.2006, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.792006389563713e-05, |
|
"loss": 1.1849, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7781401488679605e-05, |
|
"loss": 1.1786, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7642739081722075e-05, |
|
"loss": 1.1462, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.750407667476455e-05, |
|
"loss": 1.1314, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.736541426780703e-05, |
|
"loss": 1.1171, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7226751860849504e-05, |
|
"loss": 1.1162, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.708808945389198e-05, |
|
"loss": 1.1141, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.694942704693445e-05, |
|
"loss": 1.081, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.6810764639976926e-05, |
|
"loss": 1.0808, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.66721022330194e-05, |
|
"loss": 1.078, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.653343982606188e-05, |
|
"loss": 1.0797, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.639477741910435e-05, |
|
"loss": 1.0495, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6256115012146825e-05, |
|
"loss": 1.0576, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.61174526051893e-05, |
|
"loss": 1.0461, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.597879019823178e-05, |
|
"loss": 1.0314, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5840127791274255e-05, |
|
"loss": 1.0223, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5701465384316724e-05, |
|
"loss": 1.016, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.55628029773592e-05, |
|
"loss": 1.009, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.542414057040168e-05, |
|
"loss": 1.0033, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5285478163444154e-05, |
|
"loss": 0.9954, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.514681575648663e-05, |
|
"loss": 0.9986, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.50081533495291e-05, |
|
"loss": 0.9932, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.4869490942571576e-05, |
|
"loss": 0.9877, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.473082853561405e-05, |
|
"loss": 0.9707, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.459216612865653e-05, |
|
"loss": 0.9758, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.4453503721699005e-05, |
|
"loss": 0.9739, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.431484131474148e-05, |
|
"loss": 0.9785, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.417617890778396e-05, |
|
"loss": 0.9539, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.403751650082643e-05, |
|
"loss": 0.9442, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3898854093868904e-05, |
|
"loss": 0.9418, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.376019168691138e-05, |
|
"loss": 0.9513, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.362152927995386e-05, |
|
"loss": 0.9401, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3482866872996334e-05, |
|
"loss": 0.9386, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.334420446603881e-05, |
|
"loss": 0.9331, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.3205542059081287e-05, |
|
"loss": 0.925, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.3066879652123756e-05, |
|
"loss": 0.9387, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.292821724516623e-05, |
|
"loss": 0.927, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.278955483820871e-05, |
|
"loss": 0.923, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.2650892431251185e-05, |
|
"loss": 0.9103, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.2512230024293655e-05, |
|
"loss": 0.9135, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.237356761733613e-05, |
|
"loss": 0.9107, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.223490521037861e-05, |
|
"loss": 0.8941, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2096242803421084e-05, |
|
"loss": 0.9239, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.195758039646356e-05, |
|
"loss": 0.9114, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.181891798950603e-05, |
|
"loss": 0.9004, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.168025558254851e-05, |
|
"loss": 0.9063, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.154159317559098e-05, |
|
"loss": 0.8853, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.140293076863346e-05, |
|
"loss": 0.8797, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.1264268361675936e-05, |
|
"loss": 0.8929, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.1125605954718406e-05, |
|
"loss": 0.8907, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.098694354776088e-05, |
|
"loss": 0.8739, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.084828114080336e-05, |
|
"loss": 0.8813, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0709618733845835e-05, |
|
"loss": 0.8756, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0570956326888305e-05, |
|
"loss": 0.8758, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.043229391993078e-05, |
|
"loss": 0.8614, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.029363151297326e-05, |
|
"loss": 0.8682, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0154969106015734e-05, |
|
"loss": 0.8782, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.001630669905821e-05, |
|
"loss": 0.8633, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.987764429210068e-05, |
|
"loss": 0.8728, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.973898188514316e-05, |
|
"loss": 0.8588, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.960031947818563e-05, |
|
"loss": 0.8656, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.946165707122811e-05, |
|
"loss": 0.8619, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.9322994664270586e-05, |
|
"loss": 0.8472, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9184332257313056e-05, |
|
"loss": 0.8568, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.904566985035553e-05, |
|
"loss": 0.8561, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.890700744339801e-05, |
|
"loss": 0.8519, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8768345036440485e-05, |
|
"loss": 0.8442, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8629682629482955e-05, |
|
"loss": 0.8522, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.849102022252543e-05, |
|
"loss": 0.8414, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.835235781556791e-05, |
|
"loss": 0.8303, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8213695408610384e-05, |
|
"loss": 0.841, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.807503300165286e-05, |
|
"loss": 0.8283, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.793637059469533e-05, |
|
"loss": 0.8377, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7797708187737806e-05, |
|
"loss": 0.8257, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.765904578078028e-05, |
|
"loss": 0.8328, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.752038337382276e-05, |
|
"loss": 0.8217, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.7381720966865236e-05, |
|
"loss": 0.8237, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7243058559907705e-05, |
|
"loss": 0.8186, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.710439615295018e-05, |
|
"loss": 0.8302, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.696573374599266e-05, |
|
"loss": 0.8221, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6827071339035135e-05, |
|
"loss": 0.8221, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6688408932077604e-05, |
|
"loss": 0.8258, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.654974652512008e-05, |
|
"loss": 0.8289, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.641108411816256e-05, |
|
"loss": 0.8089, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6272421711205034e-05, |
|
"loss": 0.8113, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.613375930424751e-05, |
|
"loss": 0.8075, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.599509689728998e-05, |
|
"loss": 0.8048, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5856434490332456e-05, |
|
"loss": 0.8089, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.571777208337493e-05, |
|
"loss": 0.8097, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.557910967641741e-05, |
|
"loss": 0.7908, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5440447269459885e-05, |
|
"loss": 0.8128, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.5301784862502355e-05, |
|
"loss": 0.8109, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.516312245554483e-05, |
|
"loss": 0.804, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.502446004858731e-05, |
|
"loss": 0.805, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4885797641629784e-05, |
|
"loss": 0.7859, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4747135234672254e-05, |
|
"loss": 0.7881, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.460847282771473e-05, |
|
"loss": 0.8019, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.446981042075721e-05, |
|
"loss": 0.7831, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.433114801379968e-05, |
|
"loss": 0.7946, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.419248560684216e-05, |
|
"loss": 0.7919, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.405382319988463e-05, |
|
"loss": 0.7894, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3915160792927106e-05, |
|
"loss": 0.7884, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.377649838596958e-05, |
|
"loss": 0.7873, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.363783597901206e-05, |
|
"loss": 0.799, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3499173572054535e-05, |
|
"loss": 0.7809, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.336051116509701e-05, |
|
"loss": 0.7821, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.322184875813948e-05, |
|
"loss": 0.7348, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.308318635118196e-05, |
|
"loss": 0.7154, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.2944523944224434e-05, |
|
"loss": 0.7056, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.280586153726691e-05, |
|
"loss": 0.701, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.266719913030939e-05, |
|
"loss": 0.7071, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.252853672335186e-05, |
|
"loss": 0.7016, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.238987431639434e-05, |
|
"loss": 0.7186, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.225121190943681e-05, |
|
"loss": 0.7182, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2112549502479286e-05, |
|
"loss": 0.7152, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.197388709552176e-05, |
|
"loss": 0.7057, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.183522468856424e-05, |
|
"loss": 0.6946, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.1696562281606715e-05, |
|
"loss": 0.6964, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.155789987464919e-05, |
|
"loss": 0.7067, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.141923746769166e-05, |
|
"loss": 0.7076, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.128057506073414e-05, |
|
"loss": 0.7178, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.1141912653776614e-05, |
|
"loss": 0.6996, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.100325024681909e-05, |
|
"loss": 0.7006, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.086458783986156e-05, |
|
"loss": 0.69, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.0725925432904037e-05, |
|
"loss": 0.7071, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.058726302594651e-05, |
|
"loss": 0.7014, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0448600618988986e-05, |
|
"loss": 0.7051, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0309938212031462e-05, |
|
"loss": 0.7074, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0171275805073935e-05, |
|
"loss": 0.7072, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.0032613398116412e-05, |
|
"loss": 0.7031, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.989395099115889e-05, |
|
"loss": 0.7106, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.975528858420136e-05, |
|
"loss": 0.7008, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9616626177243838e-05, |
|
"loss": 0.7066, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.947796377028631e-05, |
|
"loss": 0.7051, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9339301363328787e-05, |
|
"loss": 0.7114, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9200638956371264e-05, |
|
"loss": 0.6977, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.906197654941374e-05, |
|
"loss": 0.6933, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.892331414245621e-05, |
|
"loss": 0.6886, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8784651735498686e-05, |
|
"loss": 0.7023, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8645989328541163e-05, |
|
"loss": 0.705, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.850732692158364e-05, |
|
"loss": 0.6944, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8368664514626116e-05, |
|
"loss": 0.7034, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8230002107668585e-05, |
|
"loss": 0.6887, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.809133970071106e-05, |
|
"loss": 0.7061, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7952677293753538e-05, |
|
"loss": 0.6973, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7814014886796014e-05, |
|
"loss": 0.6818, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.767535247983849e-05, |
|
"loss": 0.6778, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.753669007288096e-05, |
|
"loss": 0.6856, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7398027665923437e-05, |
|
"loss": 0.6878, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7259365258965913e-05, |
|
"loss": 0.6899, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.712070285200839e-05, |
|
"loss": 0.6949, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.698204044505086e-05, |
|
"loss": 0.6895, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6843378038093336e-05, |
|
"loss": 0.6728, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6704715631135812e-05, |
|
"loss": 0.6826, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.656605322417829e-05, |
|
"loss": 0.6925, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6427390817220765e-05, |
|
"loss": 0.6953, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6288728410263235e-05, |
|
"loss": 0.6911, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.615006600330571e-05, |
|
"loss": 0.6857, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6011403596348188e-05, |
|
"loss": 0.6787, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5872741189390664e-05, |
|
"loss": 0.6742, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.573407878243314e-05, |
|
"loss": 0.6711, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.559541637547561e-05, |
|
"loss": 0.6719, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5456753968518087e-05, |
|
"loss": 0.6808, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5318091561560563e-05, |
|
"loss": 0.6815, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.517942915460304e-05, |
|
"loss": 0.6819, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5040766747645513e-05, |
|
"loss": 0.6823, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4902104340687986e-05, |
|
"loss": 0.6833, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4763441933730462e-05, |
|
"loss": 0.6659, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.462477952677294e-05, |
|
"loss": 0.6755, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4486117119815415e-05, |
|
"loss": 0.6567, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4347454712857888e-05, |
|
"loss": 0.6739, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4208792305900364e-05, |
|
"loss": 0.6695, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.407012989894284e-05, |
|
"loss": 0.6727, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3931467491985314e-05, |
|
"loss": 0.6748, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.379280508502779e-05, |
|
"loss": 0.689, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3654142678070267e-05, |
|
"loss": 0.6685, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.351548027111274e-05, |
|
"loss": 0.6692, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3376817864155216e-05, |
|
"loss": 0.6645, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.323815545719769e-05, |
|
"loss": 0.6567, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3099493050240166e-05, |
|
"loss": 0.6744, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.296083064328264e-05, |
|
"loss": 0.6607, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2822168236325115e-05, |
|
"loss": 0.6663, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.268350582936759e-05, |
|
"loss": 0.6625, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2544843422410065e-05, |
|
"loss": 0.6608, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.240618101545254e-05, |
|
"loss": 0.6746, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2267518608495014e-05, |
|
"loss": 0.653, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.212885620153749e-05, |
|
"loss": 0.6539, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.1990193794579964e-05, |
|
"loss": 0.6706, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.185153138762244e-05, |
|
"loss": 0.6619, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1712868980664916e-05, |
|
"loss": 0.6659, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.157420657370739e-05, |
|
"loss": 0.6628, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1435544166749866e-05, |
|
"loss": 0.6568, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.129688175979234e-05, |
|
"loss": 0.6644, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1158219352834815e-05, |
|
"loss": 0.655, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.101955694587729e-05, |
|
"loss": 0.6536, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0880894538919765e-05, |
|
"loss": 0.6755, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.074223213196224e-05, |
|
"loss": 0.6575, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0603569725004714e-05, |
|
"loss": 0.6535, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.046490731804719e-05, |
|
"loss": 0.6531, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0326244911089664e-05, |
|
"loss": 0.656, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.018758250413214e-05, |
|
"loss": 0.6459, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0048920097174613e-05, |
|
"loss": 0.6646, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.991025769021709e-05, |
|
"loss": 0.6518, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9771595283259566e-05, |
|
"loss": 0.6539, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.963293287630204e-05, |
|
"loss": 0.6568, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9494270469344516e-05, |
|
"loss": 0.6647, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.935560806238699e-05, |
|
"loss": 0.6682, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9216945655429465e-05, |
|
"loss": 0.6564, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.907828324847194e-05, |
|
"loss": 0.6428, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.8939620841514415e-05, |
|
"loss": 0.6488, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.880095843455689e-05, |
|
"loss": 0.6384, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8662296027599367e-05, |
|
"loss": 0.6545, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.852363362064184e-05, |
|
"loss": 0.6477, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8384971213684317e-05, |
|
"loss": 0.6552, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8246308806726793e-05, |
|
"loss": 0.6497, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8107646399769266e-05, |
|
"loss": 0.6604, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7968983992811743e-05, |
|
"loss": 0.6485, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.783032158585422e-05, |
|
"loss": 0.6455, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7691659178896692e-05, |
|
"loss": 0.6377, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.755299677193917e-05, |
|
"loss": 0.6406, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7414334364981642e-05, |
|
"loss": 0.6398, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7275671958024118e-05, |
|
"loss": 0.6443, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.713700955106659e-05, |
|
"loss": 0.6327, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6998347144109068e-05, |
|
"loss": 0.6404, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6859684737151544e-05, |
|
"loss": 0.6442, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6721022330194017e-05, |
|
"loss": 0.6319, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6582359923236494e-05, |
|
"loss": 0.5924, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6443697516278967e-05, |
|
"loss": 0.5649, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6305035109321443e-05, |
|
"loss": 0.5683, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.616637270236392e-05, |
|
"loss": 0.5665, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6027710295406392e-05, |
|
"loss": 0.5736, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.588904788844887e-05, |
|
"loss": 0.5595, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5750385481491342e-05, |
|
"loss": 0.5596, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.561172307453382e-05, |
|
"loss": 0.565, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.547306066757629e-05, |
|
"loss": 0.554, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5334398260618768e-05, |
|
"loss": 0.5606, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5195735853661244e-05, |
|
"loss": 0.5725, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5057073446703717e-05, |
|
"loss": 0.5499, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4918411039746194e-05, |
|
"loss": 0.5634, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4779748632788667e-05, |
|
"loss": 0.5664, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4641086225831143e-05, |
|
"loss": 0.5692, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4502423818873618e-05, |
|
"loss": 0.5705, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4363761411916093e-05, |
|
"loss": 0.5569, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4225099004958569e-05, |
|
"loss": 0.5596, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4086436598001044e-05, |
|
"loss": 0.5492, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3947774191043519e-05, |
|
"loss": 0.5534, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3809111784085993e-05, |
|
"loss": 0.5613, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.367044937712847e-05, |
|
"loss": 0.5682, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3531786970170943e-05, |
|
"loss": 0.5638, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.339312456321342e-05, |
|
"loss": 0.5755, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3254462156255896e-05, |
|
"loss": 0.5574, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3115799749298369e-05, |
|
"loss": 0.5691, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2977137342340845e-05, |
|
"loss": 0.5591, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2838474935383318e-05, |
|
"loss": 0.5601, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2699812528425795e-05, |
|
"loss": 0.5558, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2561150121468268e-05, |
|
"loss": 0.556, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2422487714510744e-05, |
|
"loss": 0.5487, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2283825307553219e-05, |
|
"loss": 0.5625, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2145162900595694e-05, |
|
"loss": 0.5595, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2006500493638168e-05, |
|
"loss": 0.5566, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1867838086680643e-05, |
|
"loss": 0.5573, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.172917567972312e-05, |
|
"loss": 0.5679, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1590513272765594e-05, |
|
"loss": 0.5678, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1451850865808069e-05, |
|
"loss": 0.5582, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1313188458850545e-05, |
|
"loss": 0.5558, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.117452605189302e-05, |
|
"loss": 0.5564, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1035863644935495e-05, |
|
"loss": 0.5551, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.089720123797797e-05, |
|
"loss": 0.5582, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0758538831020446e-05, |
|
"loss": 0.5558, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.061987642406292e-05, |
|
"loss": 0.5594, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0481214017105396e-05, |
|
"loss": 0.5576, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.034255161014787e-05, |
|
"loss": 0.5557, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0203889203190345e-05, |
|
"loss": 0.5567, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.006522679623282e-05, |
|
"loss": 0.5592, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.926564389275294e-06, |
|
"loss": 0.5555, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.787901982317771e-06, |
|
"loss": 0.5528, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.649239575360246e-06, |
|
"loss": 0.5496, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.51057716840272e-06, |
|
"loss": 0.5588, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.371914761445195e-06, |
|
"loss": 0.5559, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.23325235448767e-06, |
|
"loss": 0.5604, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.094589947530145e-06, |
|
"loss": 0.5542, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.95592754057262e-06, |
|
"loss": 0.5512, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.817265133615096e-06, |
|
"loss": 0.5521, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.67860272665757e-06, |
|
"loss": 0.5523, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.539940319700045e-06, |
|
"loss": 0.5443, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.401277912742522e-06, |
|
"loss": 0.5555, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.262615505784996e-06, |
|
"loss": 0.5434, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.123953098827471e-06, |
|
"loss": 0.5527, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.985290691869946e-06, |
|
"loss": 0.5503, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.846628284912422e-06, |
|
"loss": 0.5496, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.707965877954897e-06, |
|
"loss": 0.5401, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.569303470997372e-06, |
|
"loss": 0.5506, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.4306410640398465e-06, |
|
"loss": 0.557, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.291978657082321e-06, |
|
"loss": 0.5462, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.153316250124796e-06, |
|
"loss": 0.5561, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.014653843167271e-06, |
|
"loss": 0.5479, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.875991436209747e-06, |
|
"loss": 0.551, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.737329029252222e-06, |
|
"loss": 0.5445, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.598666622294697e-06, |
|
"loss": 0.5399, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.460004215337171e-06, |
|
"loss": 0.5511, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.321341808379647e-06, |
|
"loss": 0.5509, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.1826794014221225e-06, |
|
"loss": 0.5478, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.044016994464597e-06, |
|
"loss": 0.5434, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.905354587507072e-06, |
|
"loss": 0.548, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.7666921805495476e-06, |
|
"loss": 0.5613, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.628029773592022e-06, |
|
"loss": 0.5487, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.489367366634497e-06, |
|
"loss": 0.551, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.350704959676972e-06, |
|
"loss": 0.5485, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.212042552719447e-06, |
|
"loss": 0.5433, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.073380145761922e-06, |
|
"loss": 0.5423, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.934717738804398e-06, |
|
"loss": 0.5475, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.796055331846873e-06, |
|
"loss": 0.5441, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.657392924889348e-06, |
|
"loss": 0.5592, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.518730517931823e-06, |
|
"loss": 0.5347, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.3800681109742975e-06, |
|
"loss": 0.523, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.241405704016773e-06, |
|
"loss": 0.5407, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.102743297059248e-06, |
|
"loss": 0.5391, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.9640808901017225e-06, |
|
"loss": 0.5403, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.825418483144198e-06, |
|
"loss": 0.5397, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6867560761866733e-06, |
|
"loss": 0.5446, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.548093669229148e-06, |
|
"loss": 0.5422, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.4094312622716227e-06, |
|
"loss": 0.5428, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.2707688553140983e-06, |
|
"loss": 0.5433, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1321064483565735e-06, |
|
"loss": 0.5235, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9934440413990486e-06, |
|
"loss": 0.5394, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8547816344415234e-06, |
|
"loss": 0.5416, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7161192274839985e-06, |
|
"loss": 0.5407, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5774568205264733e-06, |
|
"loss": 0.5323, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.438794413568949e-06, |
|
"loss": 0.5463, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3001320066114236e-06, |
|
"loss": 0.5348, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1614695996538988e-06, |
|
"loss": 0.5341, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.022807192696374e-06, |
|
"loss": 0.54, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8841447857388489e-06, |
|
"loss": 0.5435, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.745482378781324e-06, |
|
"loss": 0.5384, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.606819971823799e-06, |
|
"loss": 0.5398, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4681575648662741e-06, |
|
"loss": 0.539, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.329495157908749e-06, |
|
"loss": 0.5348, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1908327509512242e-06, |
|
"loss": 0.5231, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0521703439936992e-06, |
|
"loss": 0.5353, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 9.135079370361742e-07, |
|
"loss": 0.5421, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.748455300786494e-07, |
|
"loss": 0.5531, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.361831231211245e-07, |
|
"loss": 0.5364, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.975207161635994e-07, |
|
"loss": 0.5406, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.588583092060745e-07, |
|
"loss": 0.5395, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.201959022485496e-07, |
|
"loss": 0.5286, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.153349529102467e-08, |
|
"loss": 0.5469, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 180294, |
|
"total_flos": 2.0490419005872538e+17, |
|
"train_loss": 0.45402886773448525, |
|
"train_runtime": 39387.4889, |
|
"train_samples_per_second": 45.774, |
|
"train_steps_per_second": 4.577 |
|
} |
|
], |
|
"max_steps": 180294, |
|
"num_train_epochs": 3, |
|
"total_flos": 2.0490419005872538e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |