|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 6.0, |
|
"eval_steps": 500, |
|
"global_step": 600, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 1.3296, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 1.0179, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.3208, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.8602, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.8253, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.4955, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 1.356, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 1.3023, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5e-05, |
|
"loss": 1.1931, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 5.555555555555556e-05, |
|
"loss": 1.331, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 6.111111111111112e-05, |
|
"loss": 1.5246, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 1.543, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 7.222222222222222e-05, |
|
"loss": 1.6207, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 7.777777777777778e-05, |
|
"loss": 1.0532, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 0.738, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 8.888888888888889e-05, |
|
"loss": 1.4879, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.444444444444444e-05, |
|
"loss": 0.8541, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001, |
|
"loss": 0.3011, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.999927156177032e-05, |
|
"loss": 0.5649, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.999708626830618e-05, |
|
"loss": 1.2928, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.999344418328162e-05, |
|
"loss": 0.7608, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.998834541281798e-05, |
|
"loss": 1.058, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.998179010548088e-05, |
|
"loss": 1.3221, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.997377845227576e-05, |
|
"loss": 0.8712, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.996431068664237e-05, |
|
"loss": 0.5889, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.995338708444804e-05, |
|
"loss": 1.0206, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.994100796397954e-05, |
|
"loss": 1.0047, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.992717368593385e-05, |
|
"loss": 0.6275, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.991188465340766e-05, |
|
"loss": 1.0235, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.989514131188559e-05, |
|
"loss": 1.1663, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.987694414922724e-05, |
|
"loss": 0.5028, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.985729369565299e-05, |
|
"loss": 0.1488, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.983619052372848e-05, |
|
"loss": 0.5894, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.9813635248348e-05, |
|
"loss": 0.1516, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.978962852671656e-05, |
|
"loss": 0.6211, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.97641710583307e-05, |
|
"loss": 0.4634, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.973726358495817e-05, |
|
"loss": 1.3872, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.970890689061622e-05, |
|
"loss": 1.3923, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.967910180154889e-05, |
|
"loss": 1.1016, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.964784918620282e-05, |
|
"loss": 0.2436, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.961514995520201e-05, |
|
"loss": 1.2478, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.958100506132127e-05, |
|
"loss": 0.4752, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.95454154994584e-05, |
|
"loss": 0.7473, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.950838230660534e-05, |
|
"loss": 0.5468, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.946990656181781e-05, |
|
"loss": 0.9922, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.942998938618394e-05, |
|
"loss": 0.7146, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.938863194279163e-05, |
|
"loss": 0.941, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.934583543669453e-05, |
|
"loss": 1.1213, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.930160111487716e-05, |
|
"loss": 1.0282, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.925593026621833e-05, |
|
"loss": 0.8123, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.920882422145372e-05, |
|
"loss": 0.5971, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.916028435313708e-05, |
|
"loss": 0.644, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.911031207560027e-05, |
|
"loss": 0.5748, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.905890884491195e-05, |
|
"loss": 1.1141, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.900607615883528e-05, |
|
"loss": 1.2696, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.895181555678418e-05, |
|
"loss": 1.0389, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.889612861977853e-05, |
|
"loss": 0.6419, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.883901697039808e-05, |
|
"loss": 0.6725, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.87804822727352e-05, |
|
"loss": 0.746, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.872052623234632e-05, |
|
"loss": 0.6614, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.865915059620232e-05, |
|
"loss": 0.5904, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.85963571526376e-05, |
|
"loss": 1.119, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.853214773129796e-05, |
|
"loss": 1.0983, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.846652420308728e-05, |
|
"loss": 1.093, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.839948848011304e-05, |
|
"loss": 0.7193, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.833104251563056e-05, |
|
"loss": 1.0884, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.826118830398615e-05, |
|
"loss": 1.0729, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.818992788055889e-05, |
|
"loss": 0.6625, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.811726332170153e-05, |
|
"loss": 0.9081, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.80431967446797e-05, |
|
"loss": 0.9791, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.796773030761047e-05, |
|
"loss": 0.868, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.789086620939936e-05, |
|
"loss": 0.8326, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.781260668967628e-05, |
|
"loss": 0.6327, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.773295402873026e-05, |
|
"loss": 0.9782, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.765191054744305e-05, |
|
"loss": 1.169, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.756947860722143e-05, |
|
"loss": 0.524, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.748566060992847e-05, |
|
"loss": 0.9121, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.740045899781352e-05, |
|
"loss": 0.7816, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.731387625344104e-05, |
|
"loss": 0.1256, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.722591489961827e-05, |
|
"loss": 1.1603, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.713657749932172e-05, |
|
"loss": 1.1672, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.70458666556225e-05, |
|
"loss": 0.7319, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.695378501161045e-05, |
|
"loss": 0.8472, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.686033525031719e-05, |
|
"loss": 1.0569, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.676552009463783e-05, |
|
"loss": 0.7236, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.66693423072518e-05, |
|
"loss": 0.9983, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.657180469054213e-05, |
|
"loss": 0.5939, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.647291008651398e-05, |
|
"loss": 0.6493, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.637266137671177e-05, |
|
"loss": 0.5846, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.627106148213522e-05, |
|
"loss": 0.7209, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.61681133631542e-05, |
|
"loss": 1.147, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.606382001942255e-05, |
|
"loss": 1.1926, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.595818448979061e-05, |
|
"loss": 0.6786, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.585120985221671e-05, |
|
"loss": 0.7093, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.574289922367749e-05, |
|
"loss": 0.5315, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.563325576007701e-05, |
|
"loss": 0.743, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.552228265615492e-05, |
|
"loss": 0.9921, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.540998314539328e-05, |
|
"loss": 0.7962, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.529636049992234e-05, |
|
"loss": 0.7813, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.518141803042527e-05, |
|
"loss": 1.0127, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.506515908604162e-05, |
|
"loss": 1.0064, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.494758705426978e-05, |
|
"loss": 1.1139, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.482870536086823e-05, |
|
"loss": 0.5693, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.470851746975582e-05, |
|
"loss": 0.5606, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.458702688291073e-05, |
|
"loss": 0.5032, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.446423714026846e-05, |
|
"loss": 0.5778, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.434015181961873e-05, |
|
"loss": 0.6076, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.421477453650118e-05, |
|
"loss": 0.5808, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.408810894410009e-05, |
|
"loss": 0.5576, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.396015873313781e-05, |
|
"loss": 1.0834, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.38309276317674e-05, |
|
"loss": 0.8611, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.37004194054638e-05, |
|
"loss": 0.9198, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.356863785691428e-05, |
|
"loss": 0.0983, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.343558682590756e-05, |
|
"loss": 0.5039, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 0.5061, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.316569186051234e-05, |
|
"loss": 1.0256, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.302885579019627e-05, |
|
"loss": 0.9337, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.289076596533872e-05, |
|
"loss": 0.5748, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.2751426409536e-05, |
|
"loss": 0.587, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.261084118279847e-05, |
|
"loss": 1.1003, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.24690143814323e-05, |
|
"loss": 0.6208, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.232595013792002e-05, |
|
"loss": 0.1508, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.218165262080023e-05, |
|
"loss": 0.522, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.203612603454604e-05, |
|
"loss": 1.036, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.18893746194426e-05, |
|
"loss": 0.609, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.174140265146356e-05, |
|
"loss": 1.0418, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.159221444214645e-05, |
|
"loss": 0.4341, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.144181433846707e-05, |
|
"loss": 0.8735, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.129020672271283e-05, |
|
"loss": 0.4226, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 9.113739601235507e-05, |
|
"loss": 0.9683, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.09833866599203e-05, |
|
"loss": 0.5853, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.082818315286055e-05, |
|
"loss": 0.3069, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.067179001342252e-05, |
|
"loss": 0.6285, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 9.051421179851588e-05, |
|
"loss": 0.592, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.035545309958046e-05, |
|
"loss": 1.1428, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 9.01955185424525e-05, |
|
"loss": 0.407, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 9.003441278722981e-05, |
|
"loss": 0.4145, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 8.987214052813604e-05, |
|
"loss": 1.0347, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 8.970870649338387e-05, |
|
"loss": 1.2144, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 8.954411544503729e-05, |
|
"loss": 1.0854, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 8.937837217887273e-05, |
|
"loss": 0.5885, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 8.921148152423946e-05, |
|
"loss": 1.0299, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 8.904344834391882e-05, |
|
"loss": 1.1687, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 8.887427753398248e-05, |
|
"loss": 1.1054, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 8.870397402364984e-05, |
|
"loss": 0.9101, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 8.853254277514446e-05, |
|
"loss": 0.5874, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.835998878354931e-05, |
|
"loss": 0.4957, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.818631707666135e-05, |
|
"loss": 0.5342, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.801153271484502e-05, |
|
"loss": 1.0376, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 8.783564079088477e-05, |
|
"loss": 0.8963, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 8.765864642983665e-05, |
|
"loss": 0.7895, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 8.748055478887904e-05, |
|
"loss": 0.9246, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 8.73013710571623e-05, |
|
"loss": 1.1028, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 8.712110045565768e-05, |
|
"loss": 0.896, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 8.693974823700506e-05, |
|
"loss": 0.9723, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 8.675731968536002e-05, |
|
"loss": 1.2289, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 8.657382011623981e-05, |
|
"loss": 1.1148, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 8.638925487636848e-05, |
|
"loss": 0.6829, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 8.620362934352109e-05, |
|
"loss": 0.9895, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 8.6016948926367e-05, |
|
"loss": 0.5653, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 8.582921906431237e-05, |
|
"loss": 0.8259, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 8.564044522734147e-05, |
|
"loss": 0.7777, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.545063291585752e-05, |
|
"loss": 0.9909, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.52597876605223e-05, |
|
"loss": 0.4626, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.506791502209496e-05, |
|
"loss": 1.051, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.487502059127015e-05, |
|
"loss": 1.0609, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.468110998851496e-05, |
|
"loss": 0.6606, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.448618886390522e-05, |
|
"loss": 0.7334, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.429026289696091e-05, |
|
"loss": 1.0334, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.40933377964806e-05, |
|
"loss": 0.4235, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.389541930037516e-05, |
|
"loss": 1.0154, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.369651317550054e-05, |
|
"loss": 0.165, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.349662521748977e-05, |
|
"loss": 0.9675, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.329576125058406e-05, |
|
"loss": 0.351, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.309392712746308e-05, |
|
"loss": 0.8936, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.289112872907454e-05, |
|
"loss": 0.4929, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.268737196446264e-05, |
|
"loss": 0.8426, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.248266277059607e-05, |
|
"loss": 0.8089, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.227700711219493e-05, |
|
"loss": 0.4637, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.2070410981557e-05, |
|
"loss": 0.1139, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.186288039838304e-05, |
|
"loss": 0.0912, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.16544214096015e-05, |
|
"loss": 1.0137, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.144504008919222e-05, |
|
"loss": 0.721, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.123474253800957e-05, |
|
"loss": 0.6093, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.102353488360454e-05, |
|
"loss": 1.1467, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 8.081142328004637e-05, |
|
"loss": 1.042, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 8.059841390774307e-05, |
|
"loss": 0.5714, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 8.038451297326145e-05, |
|
"loss": 0.6604, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 8.016972670914624e-05, |
|
"loss": 1.0334, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.995406137373846e-05, |
|
"loss": 0.9352, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.973752325099314e-05, |
|
"loss": 0.3907, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.952011865029614e-05, |
|
"loss": 0.3872, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.930185390628035e-05, |
|
"loss": 0.5052, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 7.908273537864113e-05, |
|
"loss": 0.9008, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.886276945195099e-05, |
|
"loss": 0.1056, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.86419625354735e-05, |
|
"loss": 0.8095, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 7.842032106297666e-05, |
|
"loss": 1.1502, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.819785149254532e-05, |
|
"loss": 1.1409, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 7.797456030639313e-05, |
|
"loss": 0.0934, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 7.77504540106735e-05, |
|
"loss": 0.9729, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 7.752553913529018e-05, |
|
"loss": 0.6731, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 7.729982223370691e-05, |
|
"loss": 0.6876, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 7.707330988275651e-05, |
|
"loss": 0.8572, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 7.68460086824492e-05, |
|
"loss": 0.7632, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 7.661792525578035e-05, |
|
"loss": 0.8381, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 7.638906624853743e-05, |
|
"loss": 0.1438, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 7.61594383291065e-05, |
|
"loss": 0.9388, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 7.592904818827775e-05, |
|
"loss": 0.7623, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 7.569790253905059e-05, |
|
"loss": 0.7193, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 7.546600811643816e-05, |
|
"loss": 0.4692, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 7.523337167727095e-05, |
|
"loss": 0.852, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.4806, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 7.476589988449939e-05, |
|
"loss": 0.9571, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 7.453107815186803e-05, |
|
"loss": 0.8643, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 7.429554164423102e-05, |
|
"loss": 0.1021, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 7.405929722454026e-05, |
|
"loss": 0.8053, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 7.382235177637437e-05, |
|
"loss": 0.7002, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 7.358471220373832e-05, |
|
"loss": 0.9525, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 7.334638543086203e-05, |
|
"loss": 0.5114, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 7.310737840199885e-05, |
|
"loss": 1.1477, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 7.286769808122304e-05, |
|
"loss": 0.9718, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.262735145222696e-05, |
|
"loss": 0.4707, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.238634551811749e-05, |
|
"loss": 0.3755, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.214468730121208e-05, |
|
"loss": 0.4897, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.190238384283412e-05, |
|
"loss": 1.0034, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.165944220310767e-05, |
|
"loss": 1.0291, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 7.141586946075183e-05, |
|
"loss": 0.7632, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 7.117167271287453e-05, |
|
"loss": 0.4562, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 7.092685907476558e-05, |
|
"loss": 0.9181, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 7.068143567968957e-05, |
|
"loss": 0.1491, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 7.043540967867782e-05, |
|
"loss": 0.5085, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 7.018878824032009e-05, |
|
"loss": 0.4929, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 6.994157855055576e-05, |
|
"loss": 0.9621, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 6.969378781246436e-05, |
|
"loss": 0.4147, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 6.944542324605578e-05, |
|
"loss": 0.9537, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 6.919649208805981e-05, |
|
"loss": 0.3851, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 6.894700159171534e-05, |
|
"loss": 0.6313, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 6.869695902655897e-05, |
|
"loss": 1.0456, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 6.844637167821326e-05, |
|
"loss": 0.8106, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 6.819524684817438e-05, |
|
"loss": 0.6425, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 6.794359185359938e-05, |
|
"loss": 0.5893, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 6.769141402709305e-05, |
|
"loss": 0.3764, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 6.743872071649411e-05, |
|
"loss": 0.8726, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 6.718551928466132e-05, |
|
"loss": 0.4516, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 6.693181710925878e-05, |
|
"loss": 0.1484, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 6.667762158254104e-05, |
|
"loss": 0.4944, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 6.642294011113764e-05, |
|
"loss": 0.5293, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 6.616778011583743e-05, |
|
"loss": 0.8189, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 6.59121490313722e-05, |
|
"loss": 0.3628, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.565605430620013e-05, |
|
"loss": 0.8436, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 6.539950340228877e-05, |
|
"loss": 0.9848, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 6.514250379489753e-05, |
|
"loss": 0.4029, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.488506297236003e-05, |
|
"loss": 0.5044, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 6.462718843586571e-05, |
|
"loss": 0.0983, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 6.436888769924142e-05, |
|
"loss": 0.4742, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 6.411016828873239e-05, |
|
"loss": 0.8615, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 6.385103774278303e-05, |
|
"loss": 1.0037, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.359150361181715e-05, |
|
"loss": 0.3709, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.333157345801809e-05, |
|
"loss": 0.7423, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.307125485510828e-05, |
|
"loss": 0.5818, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.281055538812861e-05, |
|
"loss": 0.4394, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.254948265321744e-05, |
|
"loss": 0.8806, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.228804425738917e-05, |
|
"loss": 0.6338, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.202624781831268e-05, |
|
"loss": 0.9208, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.176410096408938e-05, |
|
"loss": 0.6143, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.150161133303089e-05, |
|
"loss": 0.665, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.123878657343648e-05, |
|
"loss": 0.1202, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.0975634343370256e-05, |
|
"loss": 0.7635, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.071216231043799e-05, |
|
"loss": 0.3946, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.044837815156377e-05, |
|
"loss": 0.9457, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.018428955276617e-05, |
|
"loss": 0.4996, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.99199042089345e-05, |
|
"loss": 0.0955, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.9655229823604406e-05, |
|
"loss": 1.0307, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.939027410873351e-05, |
|
"loss": 0.397, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.912504478447669e-05, |
|
"loss": 0.9636, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.885954957896115e-05, |
|
"loss": 0.4741, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.859379622806115e-05, |
|
"loss": 0.8797, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 5.832779247517273e-05, |
|
"loss": 0.7577, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.8061546070987994e-05, |
|
"loss": 0.7404, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.779506477326933e-05, |
|
"loss": 0.8952, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 5.752835634662331e-05, |
|
"loss": 0.7155, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 5.726142856227452e-05, |
|
"loss": 0.9328, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.699428919783906e-05, |
|
"loss": 0.9198, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.672694603709794e-05, |
|
"loss": 0.9023, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.645940686977033e-05, |
|
"loss": 0.4921, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.619167949128652e-05, |
|
"loss": 0.5109, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 5.59237717025608e-05, |
|
"loss": 0.6515, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 5.565569130976422e-05, |
|
"loss": 0.8769, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 5.538744612409701e-05, |
|
"loss": 0.718, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.5119043961561136e-05, |
|
"loss": 0.9196, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.4850492642732406e-05, |
|
"loss": 0.1985, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.458179999253275e-05, |
|
"loss": 0.9993, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 5.4312973840002045e-05, |
|
"loss": 0.789, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 5.4044022018070214e-05, |
|
"loss": 0.2474, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 5.37749523633288e-05, |
|
"loss": 0.1283, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.3505772715802704e-05, |
|
"loss": 0.2069, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.3236490918721794e-05, |
|
"loss": 0.7092, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.296711481829226e-05, |
|
"loss": 0.4774, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.2697652263468125e-05, |
|
"loss": 0.6437, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.242811110572242e-05, |
|
"loss": 0.8045, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 5.2158499198818503e-05, |
|
"loss": 0.6945, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 5.188882439858117e-05, |
|
"loss": 0.7385, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 5.1619094562667804e-05, |
|
"loss": 0.455, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 5.134931755033936e-05, |
|
"loss": 0.5975, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 5.107950122223139e-05, |
|
"loss": 0.1079, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 5.080965344012508e-05, |
|
"loss": 0.888, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 5.053978206671801e-05, |
|
"loss": 0.4048, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 5.0269894965395225e-05, |
|
"loss": 0.2892, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 5e-05, |
|
"loss": 0.7617, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.973010503460479e-05, |
|
"loss": 0.507, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.946021793328201e-05, |
|
"loss": 0.8717, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.919034655987493e-05, |
|
"loss": 0.7561, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.892049877776861e-05, |
|
"loss": 0.5775, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.865068244966066e-05, |
|
"loss": 0.4048, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.838090543733222e-05, |
|
"loss": 0.771, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.8111175601418844e-05, |
|
"loss": 0.4579, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.784150080118152e-05, |
|
"loss": 0.5625, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.7571888894277604e-05, |
|
"loss": 0.4475, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.730234773653188e-05, |
|
"loss": 0.1464, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.703288518170774e-05, |
|
"loss": 0.3964, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.676350908127822e-05, |
|
"loss": 0.4403, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.6494227284197294e-05, |
|
"loss": 0.6092, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.622504763667122e-05, |
|
"loss": 0.3271, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.59559779819298e-05, |
|
"loss": 0.647, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.568702615999797e-05, |
|
"loss": 0.582, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 4.541820000746727e-05, |
|
"loss": 0.404, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.51495073572676e-05, |
|
"loss": 0.4076, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.4880956038438876e-05, |
|
"loss": 0.5281, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 4.461255387590299e-05, |
|
"loss": 0.5991, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.434430869023579e-05, |
|
"loss": 0.5625, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.4076228297439204e-05, |
|
"loss": 0.4749, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.38083205087135e-05, |
|
"loss": 0.8395, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.35405931302297e-05, |
|
"loss": 0.1073, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.3273053962902076e-05, |
|
"loss": 0.7115, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 4.3005710802160965e-05, |
|
"loss": 0.7178, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.27385714377255e-05, |
|
"loss": 0.4655, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.2471643653376685e-05, |
|
"loss": 0.3833, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.220493522673067e-05, |
|
"loss": 0.8855, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.193845392901201e-05, |
|
"loss": 0.2677, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.1672207524827275e-05, |
|
"loss": 0.7882, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.140620377193885e-05, |
|
"loss": 0.5953, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.114045042103887e-05, |
|
"loss": 0.8057, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.087495521552331e-05, |
|
"loss": 0.9334, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.06097258912665e-05, |
|
"loss": 0.5721, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.0344770176395606e-05, |
|
"loss": 0.7482, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.0080095791065505e-05, |
|
"loss": 0.3746, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.9815710447233836e-05, |
|
"loss": 0.6298, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.955162184843625e-05, |
|
"loss": 0.6947, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.9287837689562016e-05, |
|
"loss": 0.6928, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.902436565662977e-05, |
|
"loss": 0.2789, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.876121342656355e-05, |
|
"loss": 0.326, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.849838866696913e-05, |
|
"loss": 0.7283, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.823589903591063e-05, |
|
"loss": 0.3768, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.7973752181687335e-05, |
|
"loss": 0.6908, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.771195574261084e-05, |
|
"loss": 0.655, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.745051734678256e-05, |
|
"loss": 0.665, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.718944461187138e-05, |
|
"loss": 0.7143, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.692874514489173e-05, |
|
"loss": 0.4508, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.666842654198191e-05, |
|
"loss": 0.7157, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.640849638818286e-05, |
|
"loss": 0.5367, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.614896225721699e-05, |
|
"loss": 0.5101, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.588983171126762e-05, |
|
"loss": 0.6966, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.5631112300758595e-05, |
|
"loss": 0.9672, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.53728115641343e-05, |
|
"loss": 0.1973, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.5114937027639985e-05, |
|
"loss": 0.769, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.4857496205102474e-05, |
|
"loss": 0.9575, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.460049659771124e-05, |
|
"loss": 0.0895, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.434394569379988e-05, |
|
"loss": 0.5265, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.408785096862782e-05, |
|
"loss": 0.9364, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.3832219884162585e-05, |
|
"loss": 0.4791, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.3577059888862364e-05, |
|
"loss": 0.5245, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.332237841745898e-05, |
|
"loss": 0.8515, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.30681828907412e-05, |
|
"loss": 0.8348, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.281448071533867e-05, |
|
"loss": 0.3995, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.2561279283505883e-05, |
|
"loss": 0.3216, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.2308585972906966e-05, |
|
"loss": 0.567, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.2056408146400614e-05, |
|
"loss": 0.7622, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.180475315182563e-05, |
|
"loss": 0.7857, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.1553628321786745e-05, |
|
"loss": 0.4483, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.130304097344103e-05, |
|
"loss": 0.3763, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.105299840828466e-05, |
|
"loss": 0.3648, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.080350791194019e-05, |
|
"loss": 0.3112, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.055457675394423e-05, |
|
"loss": 0.0819, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.0306212187535653e-05, |
|
"loss": 0.268, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.005842144944425e-05, |
|
"loss": 0.7788, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.9811211759679924e-05, |
|
"loss": 0.7045, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.9564590321322207e-05, |
|
"loss": 0.4414, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.9318564320310444e-05, |
|
"loss": 0.6668, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.907314092523442e-05, |
|
"loss": 0.481, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.882832728712551e-05, |
|
"loss": 0.3447, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.8584130539248166e-05, |
|
"loss": 0.0927, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.8340557796892354e-05, |
|
"loss": 0.8, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.8097616157165883e-05, |
|
"loss": 0.3228, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.7855312698787904e-05, |
|
"loss": 0.8464, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.761365448188253e-05, |
|
"loss": 0.0815, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.737264854777306e-05, |
|
"loss": 0.396, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.7132301918776977e-05, |
|
"loss": 0.883, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.6892621598001156e-05, |
|
"loss": 0.2582, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.6653614569137968e-05, |
|
"loss": 0.728, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.6415287796261706e-05, |
|
"loss": 0.7127, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.617764822362563e-05, |
|
"loss": 0.3259, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.5940702775459747e-05, |
|
"loss": 0.3665, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.5704458355768986e-05, |
|
"loss": 0.4499, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.5468921848131983e-05, |
|
"loss": 0.6581, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.5234100115500643e-05, |
|
"loss": 0.4261, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.7749, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.4766628322729064e-05, |
|
"loss": 0.6572, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.4533991883561868e-05, |
|
"loss": 0.3524, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 2.430209746094943e-05, |
|
"loss": 0.9622, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.407095181172227e-05, |
|
"loss": 0.604, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.3840561670893496e-05, |
|
"loss": 0.3321, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.3610933751462553e-05, |
|
"loss": 0.6422, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 2.3382074744219668e-05, |
|
"loss": 0.4829, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.315399131755081e-05, |
|
"loss": 0.3551, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.292669011724351e-05, |
|
"loss": 0.6241, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.2700177766293096e-05, |
|
"loss": 0.5875, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.2474460864709824e-05, |
|
"loss": 0.3197, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.2249545989326514e-05, |
|
"loss": 0.3756, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.2025439693606882e-05, |
|
"loss": 0.534, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 2.180214850745467e-05, |
|
"loss": 0.5832, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.1579678937023363e-05, |
|
"loss": 0.5484, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.1358037464526515e-05, |
|
"loss": 0.7997, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.1137230548049043e-05, |
|
"loss": 0.7011, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.091726462135888e-05, |
|
"loss": 0.7455, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 2.0698146093719656e-05, |
|
"loss": 0.3987, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.0479881349703883e-05, |
|
"loss": 0.545, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.0262476749006877e-05, |
|
"loss": 0.8259, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.0045938626261546e-05, |
|
"loss": 0.3419, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.983027329085377e-05, |
|
"loss": 0.4308, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.9615487026738543e-05, |
|
"loss": 0.6206, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.940158609225694e-05, |
|
"loss": 0.4265, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.9188576719953633e-05, |
|
"loss": 0.6544, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.8976465116395464e-05, |
|
"loss": 0.5376, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 1.8765257461990442e-05, |
|
"loss": 0.2594, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.8554959910807775e-05, |
|
"loss": 0.2927, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 1.834557859039851e-05, |
|
"loss": 0.3391, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.813711960161696e-05, |
|
"loss": 0.6223, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 1.7929589018443016e-05, |
|
"loss": 0.5188, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 1.772299288780508e-05, |
|
"loss": 0.3451, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.7517337229403946e-05, |
|
"loss": 0.7354, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 1.7312628035537387e-05, |
|
"loss": 0.3843, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.710887127092548e-05, |
|
"loss": 0.6396, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.6906072872536917e-05, |
|
"loss": 0.6972, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 1.6704238749415957e-05, |
|
"loss": 0.4035, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.6503374782510234e-05, |
|
"loss": 0.1665, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 1.6303486824499458e-05, |
|
"loss": 0.6172, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.6104580699624837e-05, |
|
"loss": 0.2412, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 1.5906662203519412e-05, |
|
"loss": 0.505, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 1.5709737103039103e-05, |
|
"loss": 0.4971, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.5513811136094787e-05, |
|
"loss": 0.7349, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 1.5318890011485065e-05, |
|
"loss": 0.3637, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.5124979408729861e-05, |
|
"loss": 0.4416, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 1.4932084977905042e-05, |
|
"loss": 0.5081, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 1.4740212339477721e-05, |
|
"loss": 0.4887, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.4549367084142484e-05, |
|
"loss": 0.5103, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.4359554772658552e-05, |
|
"loss": 0.6833, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.4170780935687656e-05, |
|
"loss": 0.5815, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 1.3983051073632997e-05, |
|
"loss": 0.5264, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 1.3796370656478935e-05, |
|
"loss": 0.352, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.3610745123631535e-05, |
|
"loss": 0.441, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 1.3426179883760192e-05, |
|
"loss": 0.2207, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.3242680314639993e-05, |
|
"loss": 0.3624, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 1.3060251762994946e-05, |
|
"loss": 0.4462, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 1.2878899544342327e-05, |
|
"loss": 0.3813, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.2698628942837699e-05, |
|
"loss": 0.2802, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 1.2519445211120979e-05, |
|
"loss": 0.5328, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.2341353570163355e-05, |
|
"loss": 0.6202, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.2164359209115234e-05, |
|
"loss": 0.4611, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.1988467285154987e-05, |
|
"loss": 0.3121, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.1813682923338653e-05, |
|
"loss": 0.5383, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.1640011216450691e-05, |
|
"loss": 0.253, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.1467457224855544e-05, |
|
"loss": 0.6717, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.129602597635015e-05, |
|
"loss": 0.6211, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.1125722466017547e-05, |
|
"loss": 0.3007, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.09565516560812e-05, |
|
"loss": 0.5946, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.0788518475760545e-05, |
|
"loss": 0.6937, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.0621627821127289e-05, |
|
"loss": 0.3618, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.0455884554962725e-05, |
|
"loss": 0.3505, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.0291293506616133e-05, |
|
"loss": 0.6379, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.012785947186397e-05, |
|
"loss": 0.6715, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 9.965587212770195e-06, |
|
"loss": 0.6154, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 9.804481457547498e-06, |
|
"loss": 0.6582, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 9.644546900419533e-06, |
|
"loss": 0.5089, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 9.485788201484126e-06, |
|
"loss": 0.1242, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 9.32820998657748e-06, |
|
"loss": 0.2903, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 9.171816847139448e-06, |
|
"loss": 0.3307, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 9.016613340079705e-06, |
|
"loss": 0.5639, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 8.86260398764494e-06, |
|
"loss": 0.1877, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 8.70979327728718e-06, |
|
"loss": 0.8098, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 8.558185661532941e-06, |
|
"loss": 0.7206, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 8.407785557853553e-06, |
|
"loss": 0.6704, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 8.25859734853645e-06, |
|
"loss": 0.5953, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 8.110625380557402e-06, |
|
"loss": 0.3174, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 7.96387396545396e-06, |
|
"loss": 0.3217, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 7.81834737919978e-06, |
|
"loss": 0.3145, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 7.67404986207999e-06, |
|
"loss": 0.2236, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 7.530985618567726e-06, |
|
"loss": 0.4421, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 7.389158817201542e-06, |
|
"loss": 0.3936, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 7.248573590464014e-06, |
|
"loss": 0.5713, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 7.109234034661289e-06, |
|
"loss": 0.6414, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 6.9711442098037375e-06, |
|
"loss": 0.6305, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 6.8343081394876715e-06, |
|
"loss": 0.663, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.197, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 6.564413174092443e-06, |
|
"loss": 0.3142, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 6.431362143085734e-06, |
|
"loss": 0.2621, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 6.299580594536214e-06, |
|
"loss": 0.4311, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 6.16907236823262e-06, |
|
"loss": 0.4916, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 6.0398412668621895e-06, |
|
"loss": 0.2118, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 5.91189105589992e-06, |
|
"loss": 0.3151, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 5.785225463498828e-06, |
|
"loss": 0.457, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 5.659848180381283e-06, |
|
"loss": 0.3601, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 5.535762859731547e-06, |
|
"loss": 0.232, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 5.412973117089287e-06, |
|
"loss": 0.5667, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 5.291482530244179e-06, |
|
"loss": 0.5585, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 5.171294639131779e-06, |
|
"loss": 0.4921, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 5.05241294573024e-06, |
|
"loss": 0.4778, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 4.934840913958388e-06, |
|
"loss": 0.5271, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 4.818581969574742e-06, |
|
"loss": 0.2727, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 4.703639500077656e-06, |
|
"loss": 0.6403, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 4.590016854606727e-06, |
|
"loss": 0.4939, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 4.477717343845078e-06, |
|
"loss": 0.5571, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 4.366744239922998e-06, |
|
"loss": 0.2923, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 4.257100776322525e-06, |
|
"loss": 0.336, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 4.148790147783288e-06, |
|
"loss": 0.4136, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 4.041815510209396e-06, |
|
"loss": 0.6364, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 3.936179980577453e-06, |
|
"loss": 0.5695, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 3.8318866368458e-06, |
|
"loss": 0.2892, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.728938517864794e-06, |
|
"loss": 0.2887, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 3.6273386232882343e-06, |
|
"loss": 0.2571, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 3.527089913486037e-06, |
|
"loss": 0.5463, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 3.4281953094578877e-06, |
|
"loss": 0.4527, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.3306576927482126e-06, |
|
"loss": 0.2259, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 3.2344799053621646e-06, |
|
"loss": 0.4806, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.1396647496828247e-06, |
|
"loss": 0.3638, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 3.0462149883895563e-06, |
|
"loss": 0.5568, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.9541333443775243e-06, |
|
"loss": 0.6148, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 2.8634225006782865e-06, |
|
"loss": 0.0862, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.774085100381735e-06, |
|
"loss": 0.5631, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.686123746558961e-06, |
|
"loss": 0.6981, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.5995410021864787e-06, |
|
"loss": 0.2124, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.5143393900715296e-06, |
|
"loss": 0.1605, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 2.430521392778573e-06, |
|
"loss": 0.5083, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.3480894525569562e-06, |
|
"loss": 0.4244, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.2670459712697377e-06, |
|
"loss": 0.259, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.187393310323721e-06, |
|
"loss": 0.088, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 2.1091337906006482e-06, |
|
"loss": 0.1838, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.0322696923895434e-06, |
|
"loss": 0.5571, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 1.956803255320322e-06, |
|
"loss": 0.3224, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 1.8827366782984913e-06, |
|
"loss": 0.7158, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 1.810072119441103e-06, |
|
"loss": 0.4693, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 1.73881169601387e-06, |
|
"loss": 0.4278, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 1.6689574843694433e-06, |
|
"loss": 0.0819, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 1.6005115198869603e-06, |
|
"loss": 0.4137, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 1.53347579691272e-06, |
|
"loss": 0.5221, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 1.4678522687020413e-06, |
|
"loss": 0.5139, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 1.4036428473624019e-06, |
|
"loss": 0.5131, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.3408494037976894e-06, |
|
"loss": 0.4282, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 1.2794737676536994e-06, |
|
"loss": 0.6512, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 1.2195177272648127e-06, |
|
"loss": 0.6242, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 1.1609830296019143e-06, |
|
"loss": 0.7032, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 1.1038713802214717e-06, |
|
"loss": 0.3343, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 1.0481844432158161e-06, |
|
"loss": 0.1955, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 9.939238411647235e-07, |
|
"loss": 0.0995, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 9.410911550880475e-07, |
|
"loss": 0.2064, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 8.896879243997347e-07, |
|
"loss": 0.3112, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 8.397156468629208e-07, |
|
"loss": 0.4229, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 7.911757785462881e-07, |
|
"loss": 0.0823, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 7.44069733781677e-07, |
|
"loss": 0.5938, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 6.983988851228473e-07, |
|
"loss": 0.7056, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 6.54164563305465e-07, |
|
"loss": 0.0954, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 6.113680572083946e-07, |
|
"loss": 0.7448, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 5.700106138160688e-07, |
|
"loss": 0.5686, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 5.300934381821998e-07, |
|
"loss": 0.2387, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 4.916176933946693e-07, |
|
"loss": 0.3178, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 4.545845005415994e-07, |
|
"loss": 0.4377, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 4.189949386787462e-07, |
|
"loss": 0.3517, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 3.848500447979908e-07, |
|
"loss": 0.7012, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 3.5215081379718074e-07, |
|
"loss": 0.3336, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 3.208981984511195e-07, |
|
"loss": 0.5298, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.9109310938378877e-07, |
|
"loss": 0.7599, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.6273641504184766e-07, |
|
"loss": 0.6857, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 2.3582894166930268e-07, |
|
"loss": 0.1058, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.1037147328344387e-07, |
|
"loss": 0.3377, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 1.8636475165200174e-07, |
|
"loss": 0.2177, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 1.6380947627153143e-07, |
|
"loss": 0.2113, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 1.427063043470178e-07, |
|
"loss": 0.6957, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 1.2305585077276306e-07, |
|
"loss": 0.6408, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 1.0485868811441757e-07, |
|
"loss": 0.3335, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 8.811534659234899e-08, |
|
"loss": 0.5002, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 7.282631406615447e-08, |
|
"loss": 0.7505, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 5.899203602046655e-08, |
|
"loss": 0.5746, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 4.661291555196345e-08, |
|
"loss": 0.3938, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 3.5689313357634145e-08, |
|
"loss": 0.5023, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 2.6221547724253337e-08, |
|
"loss": 0.6144, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.8209894519122252e-08, |
|
"loss": 0.4173, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 1.1654587182013953e-08, |
|
"loss": 0.5752, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 6.5558167183898955e-09, |
|
"loss": 0.0822, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.9137316938265825e-09, |
|
"loss": 0.5562, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 7.284382296801617e-10, |
|
"loss": 0.1803, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.5857, |
|
"step": 600 |
|
}, |
    {
      "epoch": 6.0,
      "step": 600,
      "total_flos": 1.0238347568039854e+18,
      "train_loss": 0.6276818083102504,
      "train_runtime": 6533.1892,
      "train_samples_per_second": 0.367,
      "train_steps_per_second": 0.092
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 600,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 10000,
  "total_flos": 1.0238347568039854e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}