{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9935483870967743,
  "eval_steps": 500,
  "global_step": 696,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5.714285714285715e-07, "loss": 1.7586, "step": 1 },
    { "epoch": 0.01, "learning_rate": 1.142857142857143e-06, "loss": 1.6779, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.7142857142857145e-06, "loss": 1.6773, "step": 3 },
    { "epoch": 0.02, "learning_rate": 2.285714285714286e-06, "loss": 1.6802, "step": 4 },
    { "epoch": 0.02, "learning_rate": 2.8571428571428573e-06, "loss": 1.6635, "step": 5 },
    { "epoch": 0.03, "learning_rate": 3.428571428571429e-06, "loss": 1.6254, "step": 6 },
    { "epoch": 0.03, "learning_rate": 4.000000000000001e-06, "loss": 1.5515, "step": 7 },
    { "epoch": 0.03, "learning_rate": 4.571428571428572e-06, "loss": 1.5368, "step": 8 },
    { "epoch": 0.04, "learning_rate": 5.142857142857142e-06, "loss": 1.5184, "step": 9 },
    { "epoch": 0.04, "learning_rate": 5.7142857142857145e-06, "loss": 1.4823, "step": 10 },
    { "epoch": 0.05, "learning_rate": 6.285714285714286e-06, "loss": 1.4535, "step": 11 },
    { "epoch": 0.05, "learning_rate": 6.857142857142858e-06, "loss": 1.3807, "step": 12 },
    { "epoch": 0.06, "learning_rate": 7.428571428571429e-06, "loss": 1.379, "step": 13 },
    { "epoch": 0.06, "learning_rate": 8.000000000000001e-06, "loss": 1.3351, "step": 14 },
    { "epoch": 0.06, "learning_rate": 8.571428571428571e-06, "loss": 1.3309, "step": 15 },
    { "epoch": 0.07, "learning_rate": 9.142857142857144e-06, "loss": 1.2838, "step": 16 },
    { "epoch": 0.07, "learning_rate": 9.714285714285715e-06, "loss": 1.2489, "step": 17 },
    { "epoch": 0.08, "learning_rate": 1.0285714285714285e-05, "loss": 1.2665, "step": 18 },
    { "epoch": 0.08, "learning_rate": 1.0857142857142858e-05, "loss": 1.2752, "step": 19 },
    { "epoch": 0.09, "learning_rate": 1.1428571428571429e-05, "loss": 1.2046, "step": 20 },
    { "epoch": 0.09, "learning_rate": 1.2e-05, "loss": 1.1956, "step": 21 },
    { "epoch": 0.09, "learning_rate": 1.2571428571428572e-05, "loss": 1.1442, "step": 22 },
    { "epoch": 0.1, "learning_rate": 1.3142857142857145e-05, "loss": 1.1825, "step": 23 },
    { "epoch": 0.1, "learning_rate": 1.3714285714285716e-05, "loss": 1.1286, "step": 24 },
    { "epoch": 0.11, "learning_rate": 1.4285714285714287e-05, "loss": 1.1387, "step": 25 },
    { "epoch": 0.11, "learning_rate": 1.4857142857142858e-05, "loss": 1.13, "step": 26 },
    { "epoch": 0.12, "learning_rate": 1.542857142857143e-05, "loss": 1.1327, "step": 27 },
    { "epoch": 0.12, "learning_rate": 1.6000000000000003e-05, "loss": 1.0921, "step": 28 },
    { "epoch": 0.12, "learning_rate": 1.6571428571428574e-05, "loss": 1.0775, "step": 29 },
    { "epoch": 0.13, "learning_rate": 1.7142857142857142e-05, "loss": 1.0485, "step": 30 },
    { "epoch": 0.13, "learning_rate": 1.7714285714285717e-05, "loss": 1.0582, "step": 31 },
    { "epoch": 0.14, "learning_rate": 1.8285714285714288e-05, "loss": 1.0672, "step": 32 },
    { "epoch": 0.14, "learning_rate": 1.885714285714286e-05, "loss": 1.0185, "step": 33 },
    { "epoch": 0.15, "learning_rate": 1.942857142857143e-05, "loss": 1.0485, "step": 34 },
    { "epoch": 0.15, "learning_rate": 2e-05, "loss": 1.0544, "step": 35 },
    { "epoch": 0.15, "learning_rate": 1.999988705525916e-05, "loss": 1.0075, "step": 36 },
    { "epoch": 0.16, "learning_rate": 1.9999548223587948e-05, "loss": 0.9891, "step": 37 },
    { "epoch": 0.16, "learning_rate": 1.999898351264021e-05, "loss": 1.0142, "step": 38 },
    { "epoch": 0.17, "learning_rate": 1.999819293517218e-05, "loss": 0.995, "step": 39 },
    { "epoch": 0.17, "learning_rate": 1.999717650904216e-05, "loss": 0.9475, "step": 40 },
    { "epoch": 0.18, "learning_rate": 1.9995934257210156e-05, "loss": 0.9887, "step": 41 },
    { "epoch": 0.18, "learning_rate": 1.9994466207737327e-05, "loss": 0.9438, "step": 42 },
    { "epoch": 0.18, "learning_rate": 1.9992772393785366e-05, "loss": 0.9707, "step": 43 },
    { "epoch": 0.19, "learning_rate": 1.999085285361575e-05, "loss": 0.9634, "step": 44 },
    { "epoch": 0.19, "learning_rate": 1.9988707630588876e-05, "loss": 0.9461, "step": 45 },
    { "epoch": 0.2, "learning_rate": 1.9986336773163066e-05, "loss": 0.9447, "step": 46 },
    { "epoch": 0.2, "learning_rate": 1.9983740334893503e-05, "loss": 0.8977, "step": 47 },
    { "epoch": 0.21, "learning_rate": 1.9980918374430995e-05, "loss": 0.8928, "step": 48 },
    { "epoch": 0.21, "learning_rate": 1.997787095552066e-05, "loss": 0.8917, "step": 49 },
    { "epoch": 0.22, "learning_rate": 1.997459814700049e-05, "loss": 0.9113, "step": 50 },
    { "epoch": 0.22, "learning_rate": 1.9971100022799778e-05, "loss": 0.9448, "step": 51 },
    { "epoch": 0.22, "learning_rate": 1.996737666193748e-05, "loss": 0.9447, "step": 52 },
    { "epoch": 0.23, "learning_rate": 1.9963428148520393e-05, "loss": 0.9315, "step": 53 },
    { "epoch": 0.23, "learning_rate": 1.9959254571741288e-05, "loss": 0.9246, "step": 54 },
    { "epoch": 0.24, "learning_rate": 1.9954856025876874e-05, "loss": 0.9176, "step": 55 },
    { "epoch": 0.24, "learning_rate": 1.995023261028567e-05, "loss": 0.8972, "step": 56 },
    { "epoch": 0.25, "learning_rate": 1.994538442940578e-05, "loss": 0.9092, "step": 57 },
    { "epoch": 0.25, "learning_rate": 1.99403115927525e-05, "loss": 0.9203, "step": 58 },
    { "epoch": 0.25, "learning_rate": 1.9935014214915884e-05, "loss": 0.9033, "step": 59 },
    { "epoch": 0.26, "learning_rate": 1.992949241555812e-05, "loss": 0.8863, "step": 60 },
    { "epoch": 0.26, "learning_rate": 1.9923746319410852e-05, "loss": 0.8928, "step": 61 },
    { "epoch": 0.27, "learning_rate": 1.991777605627234e-05, "loss": 0.875, "step": 62 },
    { "epoch": 0.27, "learning_rate": 1.9911581761004556e-05, "loss": 0.8642, "step": 63 },
    { "epoch": 0.28, "learning_rate": 1.9905163573530114e-05, "loss": 0.8559, "step": 64 },
    { "epoch": 0.28, "learning_rate": 1.9898521638829115e-05, "loss": 0.8661, "step": 65 },
    { "epoch": 0.28, "learning_rate": 1.9891656106935878e-05, "loss": 0.8797, "step": 66 },
    { "epoch": 0.29, "learning_rate": 1.9884567132935545e-05, "loss": 0.8574, "step": 67 },
    { "epoch": 0.29, "learning_rate": 1.987725487696059e-05, "loss": 0.8368, "step": 68 },
    { "epoch": 0.3, "learning_rate": 1.986971950418718e-05, "loss": 0.8877, "step": 69 },
    { "epoch": 0.3, "learning_rate": 1.9861961184831456e-05, "loss": 0.8518, "step": 70 },
    { "epoch": 0.31, "learning_rate": 1.9853980094145698e-05, "loss": 0.8793, "step": 71 },
    { "epoch": 0.31, "learning_rate": 1.984577641241435e-05, "loss": 0.8272, "step": 72 },
    { "epoch": 0.31, "learning_rate": 1.9837350324949947e-05, "loss": 0.8362, "step": 73 },
    { "epoch": 0.32, "learning_rate": 1.9828702022088943e-05, "loss": 0.866, "step": 74 },
    { "epoch": 0.32, "learning_rate": 1.981983169918741e-05, "loss": 0.8486, "step": 75 },
    { "epoch": 0.33, "learning_rate": 1.981073955661661e-05, "loss": 0.8609, "step": 76 },
    { "epoch": 0.33, "learning_rate": 1.9801425799758475e-05, "loss": 0.8226, "step": 77 },
    { "epoch": 0.34, "learning_rate": 1.9791890639000976e-05, "loss": 0.8371, "step": 78 },
    { "epoch": 0.34, "learning_rate": 1.9782134289733374e-05, "loss": 0.7946, "step": 79 },
    { "epoch": 0.34, "learning_rate": 1.977215697234133e-05, "loss": 0.8845, "step": 80 },
    { "epoch": 0.35, "learning_rate": 1.9761958912201948e-05, "loss": 0.7966, "step": 81 },
    { "epoch": 0.35, "learning_rate": 1.9751540339678686e-05, "loss": 0.8466, "step": 82 },
    { "epoch": 0.36, "learning_rate": 1.9740901490116132e-05, "loss": 0.828, "step": 83 },
    { "epoch": 0.36, "learning_rate": 1.9730042603834715e-05, "loss": 0.794, "step": 84 },
    { "epoch": 0.37, "learning_rate": 1.9718963926125246e-05, "loss": 0.8455, "step": 85 },
    { "epoch": 0.37, "learning_rate": 1.9707665707243406e-05, "loss": 0.8334, "step": 86 },
    { "epoch": 0.37, "learning_rate": 1.9696148202404073e-05, "loss": 0.7748, "step": 87 },
    { "epoch": 0.38, "learning_rate": 1.968441167177557e-05, "loss": 0.7988, "step": 88 },
    { "epoch": 0.38, "learning_rate": 1.9672456380473784e-05, "loss": 0.8018, "step": 89 },
    { "epoch": 0.39, "learning_rate": 1.966028259855616e-05, "loss": 0.8104, "step": 90 },
    { "epoch": 0.39, "learning_rate": 1.964789060101563e-05, "loss": 0.7908, "step": 91 },
    { "epoch": 0.4, "learning_rate": 1.963528066777439e-05, "loss": 0.784, "step": 92 },
    { "epoch": 0.4, "learning_rate": 1.962245308367756e-05, "loss": 0.7828, "step": 93 },
    { "epoch": 0.4, "learning_rate": 1.9609408138486777e-05, "loss": 0.7801, "step": 94 },
    { "epoch": 0.41, "learning_rate": 1.959614612687363e-05, "loss": 0.8136, "step": 95 },
    { "epoch": 0.41, "learning_rate": 1.9582667348413014e-05, "loss": 0.7945, "step": 96 },
    { "epoch": 0.42, "learning_rate": 1.9568972107576358e-05, "loss": 0.787, "step": 97 },
    { "epoch": 0.42, "learning_rate": 1.955506071372474e-05, "loss": 0.8, "step": 98 },
    { "epoch": 0.43, "learning_rate": 1.9540933481101925e-05, "loss": 0.7672, "step": 99 },
    { "epoch": 0.43, "learning_rate": 1.9526590728827232e-05, "loss": 0.7956, "step": 100 },
    { "epoch": 0.43, "learning_rate": 1.9512032780888347e-05, "loss": 0.793, "step": 101 },
    { "epoch": 0.44, "learning_rate": 1.9497259966134008e-05, "loss": 0.7404, "step": 102 },
    { "epoch": 0.44, "learning_rate": 1.9482272618266558e-05, "loss": 0.7726, "step": 103 },
    { "epoch": 0.45, "learning_rate": 1.9467071075834422e-05, "loss": 0.7503, "step": 104 },
    { "epoch": 0.45, "learning_rate": 1.945165568222445e-05, "loss": 0.7421, "step": 105 },
    { "epoch": 0.46, "learning_rate": 1.9436026785654175e-05, "loss": 0.7826, "step": 106 },
    { "epoch": 0.46, "learning_rate": 1.9420184739163932e-05, "loss": 0.7774, "step": 107 },
    { "epoch": 0.46, "learning_rate": 1.9404129900608884e-05, "loss": 0.798, "step": 108 },
    { "epoch": 0.47, "learning_rate": 1.9387862632650945e-05, "loss": 0.7328, "step": 109 },
    { "epoch": 0.47, "learning_rate": 1.9371383302750594e-05, "loss": 0.7589, "step": 110 },
    { "epoch": 0.48, "learning_rate": 1.9354692283158555e-05, "loss": 0.788, "step": 111 },
    { "epoch": 0.48, "learning_rate": 1.933778995090741e-05, "loss": 0.7617, "step": 112 },
    { "epoch": 0.49, "learning_rate": 1.932067668780306e-05, "loss": 0.7585, "step": 113 },
    { "epoch": 0.49, "learning_rate": 1.930335288041612e-05, "loss": 0.744, "step": 114 },
    { "epoch": 0.49, "learning_rate": 1.928581892007318e-05, "loss": 0.7634, "step": 115 },
    { "epoch": 0.5, "learning_rate": 1.926807520284796e-05, "loss": 0.7431, "step": 116 },
    { "epoch": 0.5, "learning_rate": 1.9250122129552365e-05, "loss": 0.7554, "step": 117 },
    { "epoch": 0.51, "learning_rate": 1.923196010572744e-05, "loss": 0.7534, "step": 118 },
    { "epoch": 0.51, "learning_rate": 1.9213589541634203e-05, "loss": 0.723, "step": 119 },
    { "epoch": 0.52, "learning_rate": 1.9195010852244367e-05, "loss": 0.7624, "step": 120 },
    { "epoch": 0.52, "learning_rate": 1.917622445723099e-05, "loss": 0.7538, "step": 121 },
    { "epoch": 0.52, "learning_rate": 1.915723078095898e-05, "loss": 0.7327, "step": 122 },
    { "epoch": 0.53, "learning_rate": 1.9138030252475487e-05, "loss": 0.7138, "step": 123 },
    { "epoch": 0.53, "learning_rate": 1.911862330550027e-05, "loss": 0.7726, "step": 124 },
    { "epoch": 0.54, "learning_rate": 1.9099010378415847e-05, "loss": 0.8036, "step": 125 },
    { "epoch": 0.54, "learning_rate": 1.9079191914257606e-05, "loss": 0.7442, "step": 126 },
    { "epoch": 0.55, "learning_rate": 1.9059168360703805e-05, "loss": 0.7361, "step": 127 },
    { "epoch": 0.55, "learning_rate": 1.9038940170065467e-05, "loss": 0.7595, "step": 128 },
    { "epoch": 0.55, "learning_rate": 1.9018507799276132e-05, "loss": 0.7403, "step": 129 },
    { "epoch": 0.56, "learning_rate": 1.899787170988157e-05, "loss": 0.702, "step": 130 },
    { "epoch": 0.56, "learning_rate": 1.8977032368029333e-05, "loss": 0.7371, "step": 131 },
    { "epoch": 0.57, "learning_rate": 1.8955990244458234e-05, "loss": 0.7635, "step": 132 },
    { "epoch": 0.57, "learning_rate": 1.8934745814487715e-05, "loss": 0.7208, "step": 133 },
    { "epoch": 0.58, "learning_rate": 1.8913299558007096e-05, "loss": 0.6919, "step": 134 },
    { "epoch": 0.58, "learning_rate": 1.889165195946476e-05, "loss": 0.7421, "step": 135 },
    { "epoch": 0.58, "learning_rate": 1.8869803507857186e-05, "loss": 0.7129, "step": 136 },
    { "epoch": 0.59, "learning_rate": 1.884775469671791e-05, "loss": 0.7216, "step": 137 },
    { "epoch": 0.59, "learning_rate": 1.88255060241064e-05, "loss": 0.7125, "step": 138 },
    { "epoch": 0.6, "learning_rate": 1.880305799259675e-05, "loss": 0.6914, "step": 139 },
    { "epoch": 0.6, "learning_rate": 1.8780411109266387e-05, "loss": 0.7107, "step": 140 },
    { "epoch": 0.61, "learning_rate": 1.8757565885684584e-05, "loss": 0.7369, "step": 141 },
    { "epoch": 0.61, "learning_rate": 1.8734522837900917e-05, "loss": 0.7171, "step": 142 },
    { "epoch": 0.62, "learning_rate": 1.8711282486433595e-05, "loss": 0.6836, "step": 143 },
    { "epoch": 0.62, "learning_rate": 1.8687845356257708e-05, "loss": 0.6784, "step": 144 },
    { "epoch": 0.62, "learning_rate": 1.8664211976793384e-05, "loss": 0.6925, "step": 145 },
    { "epoch": 0.63, "learning_rate": 1.86403828818938e-05, "loss": 0.6724, "step": 146 },
    { "epoch": 0.63, "learning_rate": 1.8616358609833146e-05, "loss": 0.6894, "step": 147 },
    { "epoch": 0.64, "learning_rate": 1.859213970329446e-05, "loss": 0.7129, "step": 148 },
    { "epoch": 0.64, "learning_rate": 1.8567726709357367e-05, "loss": 0.6725, "step": 149 },
    { "epoch": 0.65, "learning_rate": 1.854312017948572e-05, "loss": 0.7012, "step": 150 },
    { "epoch": 0.65, "learning_rate": 1.851832066951515e-05, "loss": 0.7045, "step": 151 },
    { "epoch": 0.65, "learning_rate": 1.8493328739640497e-05, "loss": 0.684, "step": 152 },
    { "epoch": 0.66, "learning_rate": 1.8468144954403176e-05, "loss": 0.6687, "step": 153 },
    { "epoch": 0.66, "learning_rate": 1.84427698826784e-05, "loss": 0.6958, "step": 154 },
    { "epoch": 0.67, "learning_rate": 1.841720409766235e-05, "loss": 0.6714, "step": 155 },
    { "epoch": 0.67, "learning_rate": 1.8391448176859222e-05, "loss": 0.6661, "step": 156 },
    { "epoch": 0.68, "learning_rate": 1.836550270206818e-05, "loss": 0.7179, "step": 157 },
    { "epoch": 0.68, "learning_rate": 1.8339368259370198e-05, "loss": 0.6841, "step": 158 },
    { "epoch": 0.68, "learning_rate": 1.8313045439114855e-05, "loss": 0.6818, "step": 159 },
    { "epoch": 0.69, "learning_rate": 1.828653483590697e-05, "loss": 0.6982, "step": 160 },
    { "epoch": 0.69, "learning_rate": 1.825983704859319e-05, "loss": 0.6723, "step": 161 },
    { "epoch": 0.7, "learning_rate": 1.823295268024844e-05, "loss": 0.6741, "step": 162 },
    { "epoch": 0.7, "learning_rate": 1.8205882338162334e-05, "loss": 0.6777, "step": 163 },
    { "epoch": 0.71, "learning_rate": 1.8178626633825417e-05, "loss": 0.6828, "step": 164 },
    { "epoch": 0.71, "learning_rate": 1.8151186182915384e-05, "loss": 0.6513, "step": 165 },
    { "epoch": 0.71, "learning_rate": 1.8123561605283165e-05, "loss": 0.654, "step": 166 },
    { "epoch": 0.72, "learning_rate": 1.8095753524938903e-05, "loss": 0.6938, "step": 167 },
    { "epoch": 0.72, "learning_rate": 1.806776257003789e-05, "loss": 0.6823, "step": 168 },
    { "epoch": 0.73, "learning_rate": 1.803958937286635e-05, "loss": 0.6744, "step": 169 },
    { "epoch": 0.73, "learning_rate": 1.8011234569827173e-05, "loss": 0.679, "step": 170 },
    { "epoch": 0.74, "learning_rate": 1.7982698801425543e-05, "loss": 0.698, "step": 171 },
    { "epoch": 0.74, "learning_rate": 1.795398271225445e-05, "loss": 0.6836, "step": 172 },
    { "epoch": 0.74, "learning_rate": 1.7925086950980137e-05, "loss": 0.6787, "step": 173 },
    { "epoch": 0.75, "learning_rate": 1.789601217032747e-05, "loss": 0.6665, "step": 174 },
    { "epoch": 0.75, "learning_rate": 1.786675902706515e-05, "loss": 0.66, "step": 175 },
    { "epoch": 0.76, "learning_rate": 1.783732818199092e-05, "loss": 0.6881, "step": 176 },
    { "epoch": 0.76, "learning_rate": 1.7807720299916617e-05, "loss": 0.6535, "step": 177 },
    { "epoch": 0.77, "learning_rate": 1.777793604965315e-05, "loss": 0.6471, "step": 178 },
    { "epoch": 0.77, "learning_rate": 1.7747976103995403e-05, "loss": 0.6646, "step": 179 },
    { "epoch": 0.77, "learning_rate": 1.771784113970704e-05, "loss": 0.6314, "step": 180 },
    { "epoch": 0.78, "learning_rate": 1.76875318375052e-05, "loss": 0.6686, "step": 181 },
    { "epoch": 0.78, "learning_rate": 1.765704888204515e-05, "loss": 0.6157, "step": 182 },
    { "epoch": 0.79, "learning_rate": 1.7626392961904787e-05, "loss": 0.7079, "step": 183 },
    { "epoch": 0.79, "learning_rate": 1.7595564769569097e-05, "loss": 0.6686, "step": 184 },
    { "epoch": 0.8, "learning_rate": 1.7564565001414523e-05, "loss": 0.6622, "step": 185 },
    { "epoch": 0.8, "learning_rate": 1.7533394357693222e-05, "loss": 0.6542, "step": 186 },
    { "epoch": 0.8, "learning_rate": 1.7502053542517245e-05, "loss": 0.6737, "step": 187 },
    { "epoch": 0.81, "learning_rate": 1.7470543263842644e-05, "loss": 0.6702, "step": 188 },
    { "epoch": 0.81, "learning_rate": 1.7438864233453476e-05, "loss": 0.6395, "step": 189 },
    { "epoch": 0.82, "learning_rate": 1.740701716694571e-05, "loss": 0.6588, "step": 190 },
    { "epoch": 0.82, "learning_rate": 1.737500278371108e-05, "loss": 0.6398, "step": 191 },
    { "epoch": 0.83, "learning_rate": 1.734282180692083e-05, "loss": 0.6493, "step": 192 },
    { "epoch": 0.83, "learning_rate": 1.731047496350938e-05, "loss": 0.6487, "step": 193 },
    { "epoch": 0.83, "learning_rate": 1.72779629841579e-05, "loss": 0.6574, "step": 194 },
    { "epoch": 0.84, "learning_rate": 1.7245286603277804e-05, "loss": 0.6306, "step": 195 },
    { "epoch": 0.84, "learning_rate": 1.7212446558994163e-05, "loss": 0.6178, "step": 196 },
    { "epoch": 0.85, "learning_rate": 1.7179443593129042e-05, "loss": 0.6436, "step": 197 },
    { "epoch": 0.85, "learning_rate": 1.714627845118472e-05, "loss": 0.6632, "step": 198 },
    { "epoch": 0.86, "learning_rate": 1.7112951882326872e-05, "loss": 0.6403, "step": 199 },
    { "epoch": 0.86, "learning_rate": 1.7079464639367634e-05, "loss": 0.6807, "step": 200 },
    { "epoch": 0.86, "learning_rate": 1.70458174787486e-05, "loss": 0.6446, "step": 201 },
    { "epoch": 0.87, "learning_rate": 1.7012011160523744e-05, "loss": 0.634, "step": 202 },
    { "epoch": 0.87, "learning_rate": 1.6978046448342228e-05, "loss": 0.6809, "step": 203 },
    { "epoch": 0.88, "learning_rate": 1.694392410943118e-05, "loss": 0.6254, "step": 204 },
    { "epoch": 0.88, "learning_rate": 1.690964491457834e-05, "loss": 0.6472, "step": 205 },
    { "epoch": 0.89, "learning_rate": 1.687520963811467e-05, "loss": 0.6558, "step": 206 },
    { "epoch": 0.89, "learning_rate": 1.6840619057896845e-05, "loss": 0.6424, "step": 207 },
    { "epoch": 0.89, "learning_rate": 1.680587395528968e-05, "loss": 0.6443, "step": 208 },
    { "epoch": 0.9, "learning_rate": 1.6770975115148506e-05, "loss": 0.6575, "step": 209 },
    { "epoch": 0.9, "learning_rate": 1.6735923325801407e-05, "loss": 0.6402, "step": 210 },
    { "epoch": 0.91, "learning_rate": 1.670071937903144e-05, "loss": 0.6604, "step": 211 },
    { "epoch": 0.91, "learning_rate": 1.666536407005874e-05, "loss": 0.6457, "step": 212 },
    { "epoch": 0.92, "learning_rate": 1.6629858197522537e-05, "loss": 0.6121, "step": 213 },
    { "epoch": 0.92, "learning_rate": 1.6594202563463152e-05, "loss": 0.6222, "step": 214 },
    { "epoch": 0.92, "learning_rate": 1.655839797330385e-05, "loss": 0.6632, "step": 215 },
    { "epoch": 0.93, "learning_rate": 1.652244523583267e-05, "loss": 0.6305, "step": 216 },
    { "epoch": 0.93, "learning_rate": 1.648634516318413e-05, "loss": 0.6426, "step": 217 },
    { "epoch": 0.94, "learning_rate": 1.64500985708209e-05, "loss": 0.6438, "step": 218 },
    { "epoch": 0.94, "learning_rate": 1.6413706277515375e-05, "loss": 0.6365, "step": 219 },
    { "epoch": 0.95, "learning_rate": 1.6377169105331185e-05, "loss": 0.6433, "step": 220 },
    { "epoch": 0.95, "learning_rate": 1.634048787960462e-05, "loss": 0.6328, "step": 221 },
    { "epoch": 0.95, "learning_rate": 1.630366342892598e-05, "loss": 0.6161, "step": 222 },
    { "epoch": 0.96, "learning_rate": 1.6266696585120884e-05, "loss": 0.6344, "step": 223 },
    { "epoch": 0.96, "learning_rate": 1.6229588183231436e-05, "loss": 0.6234, "step": 224 },
    { "epoch": 0.97, "learning_rate": 1.6192339061497415e-05, "loss": 0.6374, "step": 225 },
    { "epoch": 0.97, "learning_rate": 1.6154950061337295e-05, "loss": 0.6472, "step": 226 },
    { "epoch": 0.98, "learning_rate": 1.6117422027329267e-05, "loss": 0.6094, "step": 227 },
    { "epoch": 0.98, "learning_rate": 1.6079755807192138e-05, "loss": 0.5943, "step": 228 },
    { "epoch": 0.98, "learning_rate": 1.604195225176621e-05, "loss": 0.6191, "step": 229 },
    { "epoch": 0.99, "learning_rate": 1.6004012214994036e-05, "loss": 0.6108, "step": 230 },
    { "epoch": 0.99, "learning_rate": 1.5965936553901137e-05, "loss": 0.6543, "step": 231 },
    { "epoch": 1.0, "learning_rate": 1.592772612857665e-05, "loss": 0.6439, "step": 232 },
    { "epoch": 1.0, "learning_rate": 1.5889381802153898e-05, "loss": 0.6041, "step": 233 },
    { "epoch": 1.01, "learning_rate": 1.5850904440790872e-05, "loss": 0.5625, "step": 234 },
    { "epoch": 1.01, "learning_rate": 1.5812294913650697e-05, "loss": 0.5727, "step": 235 },
    { "epoch": 1.02, "learning_rate": 1.5773554092881986e-05, "loss": 0.5793, "step": 236 },
    { "epoch": 1.02, "learning_rate": 1.5734682853599124e-05, "loss": 0.572, "step": 237 },
    { "epoch": 1.02, "learning_rate": 1.5695682073862527e-05, "loss": 0.5536, "step": 238 },
    { "epoch": 1.03, "learning_rate": 1.5656552634658776e-05, "loss": 0.5855, "step": 239 },
    { "epoch": 1.03, "learning_rate": 1.561729541988076e-05, "loss": 0.57, "step": 240 },
    { "epoch": 1.04, "learning_rate": 1.5577911316307658e-05, "loss": 0.5916, "step": 241 },
    { "epoch": 1.04, "learning_rate": 1.553840121358495e-05, "loss": 0.5818, "step": 242 },
    { "epoch": 1.05, "learning_rate": 1.5498766004204294e-05, "loss": 0.5852, "step": 243 },
    { "epoch": 1.05, "learning_rate": 1.545900658348338e-05, "loss": 0.5773, "step": 244 },
    { "epoch": 1.05, "learning_rate": 1.541912384954571e-05, "loss": 0.5863, "step": 245 },
    { "epoch": 1.06, "learning_rate": 1.5379118703300285e-05, "loss": 0.6045, "step": 246 },
    { "epoch": 1.06, "learning_rate": 1.533899204842128e-05, "loss": 0.5633, "step": 247 },
    { "epoch": 1.07, "learning_rate": 1.5298744791327632e-05, "loss": 0.5665, "step": 248 },
    { "epoch": 1.07, "learning_rate": 1.5258377841162534e-05, "loss": 0.5866, "step": 249 },
    { "epoch": 1.08, "learning_rate": 1.5217892109772936e-05, "loss": 0.5643, "step": 250 },
    { "epoch": 1.08, "learning_rate": 1.5177288511688927e-05, "loss": 0.5773, "step": 251 },
    { "epoch": 1.08, "learning_rate": 1.5136567964103077e-05, "loss": 0.5645, "step": 252 },
    { "epoch": 1.09, "learning_rate": 1.5095731386849725e-05, "loss": 0.5798, "step": 253 },
    { "epoch": 1.09, "learning_rate": 1.5054779702384199e-05, "loss": 0.5377, "step": 254 },
    { "epoch": 1.1, "learning_rate": 1.5013713835761978e-05, "loss": 0.5762, "step": 255 },
    { "epoch": 1.1, "learning_rate": 1.4972534714617792e-05, "loss": 0.5555, "step": 256 },
    { "epoch": 1.11, "learning_rate": 1.4931243269144673e-05, "loss": 0.5759, "step": 257 },
    { "epoch": 1.11, "learning_rate": 1.4889840432072947e-05, "loss": 0.5759, "step": 258 },
    { "epoch": 1.11, "learning_rate": 1.4848327138649153e-05, "loss": 0.5947, "step": 259 },
    { "epoch": 1.12, "learning_rate": 1.480670432661492e-05, "loss": 0.5476, "step": 260 },
    { "epoch": 1.12, "learning_rate": 1.4764972936185798e-05, "loss": 0.5819, "step": 261 },
    { "epoch": 1.13, "learning_rate": 1.4723133910029998e-05, "loss": 0.5923, "step": 262 },
    { "epoch": 1.13, "learning_rate": 1.4681188193247118e-05, "loss": 0.5393, "step": 263 },
    { "epoch": 1.14, "learning_rate": 1.4639136733346777e-05, "loss": 0.5497, "step": 264 },
    { "epoch": 1.14, "learning_rate": 1.4596980480227223e-05, "loss": 0.5612, "step": 265 },
    { "epoch": 1.14, "learning_rate": 1.4554720386153872e-05, "loss": 0.5657, "step": 266 },
    { "epoch": 1.15, "learning_rate": 1.4512357405737798e-05, "loss": 0.5774, "step": 267 },
    { "epoch": 1.15, "learning_rate": 1.4469892495914174e-05, "loss": 0.5703, "step": 268 },
    { "epoch": 1.16, "learning_rate": 1.4427326615920642e-05, "loss": 0.566, "step": 269 },
    { "epoch": 1.16, "learning_rate": 1.4384660727275664e-05, "loss": 0.57, "step": 270 },
    { "epoch": 1.17, "learning_rate": 1.4341895793756784e-05, "loss": 0.5528, "step": 271 },
    { "epoch": 1.17, "learning_rate": 1.4299032781378865e-05, "loss": 0.5695, "step": 272 },
    { "epoch": 1.17, "learning_rate": 1.425607265837228e-05, "loss": 0.539, "step": 273 },
    { "epoch": 1.18, "learning_rate": 1.4213016395161017e-05, "loss": 0.5318, "step": 274 },
    { "epoch": 1.18, "learning_rate": 1.416986496434077e-05, "loss": 0.562, "step": 275 },
    { "epoch": 1.19, "learning_rate": 1.4126619340656981e-05, "loss": 0.5444, "step": 276 },
    { "epoch": 1.19, "learning_rate": 1.4083280500982797e-05, "loss": 0.551, "step": 277 },
    { "epoch": 1.2, "learning_rate": 1.4039849424297023e-05, "loss": 0.5726, "step": 278 },
    { "epoch": 1.2, "learning_rate": 1.3996327091661996e-05, "loss": 0.5391, "step": 279 },
    { "epoch": 1.2, "learning_rate": 1.3952714486201436e-05, "loss": 0.5638, "step": 280 },
    { "epoch": 1.21, "learning_rate": 1.3909012593078224e-05, "loss": 0.5458, "step": 281 },
    { "epoch": 1.21, "learning_rate": 1.3865222399472155e-05, "loss": 0.5432, "step": 282 },
    { "epoch": 1.22, "learning_rate": 1.3821344894557653e-05, "loss": 0.5435, "step": 283 },
    { "epoch": 1.22, "learning_rate": 1.3777381069481397e-05, "loss": 0.526, "step": 284 },
    { "epoch": 1.23, "learning_rate": 1.3733331917339952e-05, "loss": 0.5597, "step": 285 },
    { "epoch": 1.23, "learning_rate": 1.3689198433157334e-05, "loss": 0.5641, "step": 286 },
    { "epoch": 1.23, "learning_rate": 1.3644981613862524e-05, "loss": 0.5445, "step": 287 },
    { "epoch": 1.24, "learning_rate": 1.3600682458266972e-05, "loss": 0.5428, "step": 288 },
    { "epoch": 1.24, "learning_rate": 1.3556301967041999e-05, "loss": 0.5693, "step": 289 },
    { "epoch": 1.25, "learning_rate": 1.3511841142696222e-05, "loss": 0.5643, "step": 290 },
    { "epoch": 1.25, "learning_rate": 1.34673009895529e-05, "loss": 0.5534, "step": 291 },
    { "epoch": 1.26, "learning_rate": 1.3422682513727246e-05, "loss": 0.568, "step": 292 },
    { "epoch": 1.26, "learning_rate": 1.3377986723103694e-05, "loss": 0.5612, "step": 293 },
    { "epoch": 1.26, "learning_rate": 1.333321462731314e-05, "loss": 0.5436, "step": 294 },
    { "epoch": 1.27, "learning_rate": 1.328836723771014e-05, "loss": 0.5599, "step": 295 },
    { "epoch": 1.27, "learning_rate": 1.3243445567350048e-05, "loss": 0.5338, "step": 296 },
    { "epoch": 1.28, "learning_rate": 1.3198450630966153e-05, "loss": 0.5526, "step": 297 },
    { "epoch": 1.28, "learning_rate": 1.3153383444946736e-05, "loss": 0.5484, "step": 298 },
    { "epoch": 1.29, "learning_rate": 1.310824502731213e-05, "loss": 0.574, "step": 299 },
    { "epoch": 1.29, "learning_rate": 1.3063036397691708e-05, "loss": 0.5819, "step": 300 },
    { "epoch": 1.29, "learning_rate": 1.3017758577300863e-05, "loss": 0.5703, "step": 301 },
    { "epoch": 1.3, "learning_rate": 1.2972412588917931e-05, "loss": 0.5207, "step": 302 },
    { "epoch": 1.3, "learning_rate": 1.2926999456861099e-05, "loss": 0.5325, "step": 303 },
    { "epoch": 1.31, "learning_rate": 1.2881520206965245e-05, "loss": 0.553, "step": 304 },
    { "epoch": 1.31, "learning_rate": 1.2835975866558793e-05, "loss": 0.5561, "step": 305 },
    { "epoch": 1.32, "learning_rate": 1.2790367464440484e-05, "loss": 0.5485, "step": 306 },
    { "epoch": 1.32, "learning_rate": 1.2744696030856154e-05, "loss": 0.5312, "step": 307 },
    { "epoch": 1.32, "learning_rate": 1.2698962597475446e-05, "loss": 0.5298, "step": 308 },
    { "epoch": 1.33, "learning_rate": 1.265316819736852e-05, "loss": 0.5316, "step": 309 },
    { "epoch": 1.33, "learning_rate": 1.2607313864982698e-05, "loss": 0.5483, "step": 310 },
    { "epoch": 1.34, "learning_rate": 1.2561400636119126e-05, "loss": 0.5588, "step": 311 },
    { "epoch": 1.34, "learning_rate": 1.2515429547909349e-05, "loss": 0.5388, "step": 312 },
    { "epoch": 1.35, "learning_rate": 1.2469401638791892e-05, "loss": 0.5223, "step": 313 },
    { "epoch": 1.35, "learning_rate": 1.2423317948488814e-05, "loss": 0.5238, "step": 314 },
    { "epoch": 1.35, "learning_rate": 1.23771795179822e-05, "loss": 0.5034, "step": 315 },
    { "epoch": 1.36, "learning_rate": 1.2330987389490671e-05, "loss": 0.52, "step": 316 },
    { "epoch": 1.36, "learning_rate": 1.2284742606445818e-05, "loss": 0.5441, "step": 317 },
    { "epoch": 1.37, "learning_rate": 1.2238446213468656e-05, "loss": 0.5618, "step": 318 },
    { "epoch": 1.37, "learning_rate": 1.2192099256345999e-05, "loss": 0.5472, "step": 319 },
    { "epoch": 1.38, "learning_rate": 1.2145702782006863e-05, "loss": 0.5502, "step": 320 },
    { "epoch": 1.38, "learning_rate": 1.20992578384988e-05, "loss": 0.5259, "step": 321 },
    { "epoch": 1.38, "learning_rate": 1.2052765474964232e-05, "loss": 0.5492, "step": 322 },
    { "epoch": 1.39, "learning_rate": 1.200622674161675e-05, "loss": 0.5268, "step": 323 },
    { "epoch": 1.39, "learning_rate": 1.195964268971739e-05, "loss": 0.5498, "step": 324 },
    { "epoch": 1.4, "learning_rate": 1.1913014371550881e-05, "loss": 0.534, "step": 325 },
    { "epoch": 1.4, "learning_rate": 1.1866342840401892e-05, "loss": 0.5294, "step": 326 },
    { "epoch": 1.41, "learning_rate": 1.1819629150531218e-05, "loss": 0.5479, "step": 327 },
    { "epoch": 1.41, "learning_rate": 1.177287435715198e-05, "loss": 0.5411, "step": 328 },
    { "epoch": 1.42, "learning_rate": 1.1726079516405778e-05, "loss": 0.5548, "step": 329 },
    { "epoch": 1.42, "learning_rate": 1.1679245685338847e-05, "loss": 0.5378, "step": 330 },
    { "epoch": 1.42, "learning_rate": 1.1632373921878168e-05, "loss": 0.5503, "step": 331 },
    { "epoch": 1.43, "learning_rate": 1.1585465284807575e-05, "loss": 0.5391, "step": 332 },
    { "epoch": 1.43, "learning_rate": 1.1538520833743845e-05, "loss": 0.5496, "step": 333 },
    { "epoch": 1.44, "learning_rate": 1.1491541629112746e-05, "loss": 0.5334, "step": 334 },
    { "epoch": 1.44, "learning_rate": 1.1444528732125098e-05, "loss": 0.5329, "step": 335 },
    { "epoch": 1.45, "learning_rate": 1.139748320475279e-05, "loss": 0.5368, "step": 336 },
    { "epoch": 1.45, "learning_rate": 1.1350406109704806e-05, "loss": 0.5294, "step": 337 },
    { "epoch": 1.45, "learning_rate": 1.1303298510403206e-05, "loss": 0.5149, "step": 338 },
    { "epoch": 1.46, "learning_rate": 1.1256161470959107e-05, "loss": 0.5439, "step": 339 },
    { "epoch": 1.46, "learning_rate": 1.1208996056148646e-05, "loss": 0.5588, "step": 340 },
    { "epoch": 1.47, "learning_rate": 1.1161803331388942e-05, "loss": 0.555, "step": 341 },
    { "epoch": 1.47, "learning_rate": 1.1114584362714006e-05, "loss": 0.5328, "step": 342 },
    { "epoch": 1.48, "learning_rate": 1.1067340216750669e-05, "loss": 0.5317, "step": 343 },
    { "epoch": 1.48, "learning_rate": 1.1020071960694498e-05, "loss": 0.5289, "step": 344 },
    { "epoch": 1.48, "learning_rate": 1.0972780662285683e-05, "loss": 0.549, "step": 345 },
    { "epoch": 1.49, "learning_rate": 1.0925467389784905e-05, "loss": 0.5388, "step": 346 },
    { "epoch": 1.49, "learning_rate": 1.087813321194923e-05, "loss": 0.5412, "step": 347 },
    { "epoch": 1.5, "learning_rate": 1.0830779198007943e-05, "loss": 0.5165, "step": 348 },
    { "epoch": 1.5, "learning_rate": 1.0783406417638418e-05, "loss": 0.5354, "step": 349 },
    { "epoch": 1.51, "learning_rate": 1.0736015940941928e-05, "loss": 0.5315, "step": 350 },
    { "epoch": 1.51, "learning_rate": 1.0688608838419496e-05, "loss": 0.5309, "step": 351 },
    { "epoch": 1.51, "learning_rate": 1.0641186180947709e-05, "loss": 0.5495, "step": 352 },
    { "epoch": 1.52, "learning_rate": 1.0593749039754513e-05, "loss": 0.5096, "step": 353 },
    { "epoch": 1.52, "learning_rate": 1.0546298486395033e-05, "loss": 0.5242, "step": 354 },
    { "epoch": 1.53, "learning_rate": 1.0498835592727357e-05, "loss": 0.5238, "step": 355 },
    { "epoch": 1.53, "learning_rate": 1.0451361430888336e-05, "loss": 0.5417, "step": 356 },
    { "epoch": 1.54, "learning_rate": 1.0403877073269349e-05, "loss": 0.528, "step": 357 },
    { "epoch": 1.54, "learning_rate": 1.0356383592492086e-05, "loss": 0.5333, "step": 358 },
    { "epoch": 1.54, "learning_rate": 1.0308882061384325e-05, "loss": 0.5303, "step": 359 },
    { "epoch": 1.55, "learning_rate": 1.0261373552955689e-05, "loss": 0.539, "step": 360 },
    { "epoch": 1.55, "learning_rate": 1.0213859140373412e-05, "loss": 0.5355, "step": 361 },
    { "epoch": 1.56, "learning_rate": 1.0166339896938099e-05, "loss": 0.5039, "step": 362 },
    { "epoch": 1.56, "learning_rate": 1.0118816896059474e-05, "loss": 0.5284, "step": 363 },
    { "epoch": 1.57, "learning_rate": 1.0071291211232143e-05, "loss": 0.5241, "step": 364 },
    { "epoch": 1.57, "learning_rate": 1.0023763916011337e-05, "loss": 0.5059, "step": 365 },
    { "epoch": 1.57, "learning_rate": 9.976236083988664e-06, "loss": 0.5217, "step": 366 },
    { "epoch": 1.58, "learning_rate": 9.928708788767859e-06, "loss": 0.5185, "step": 367 },
    { "epoch": 1.58, "learning_rate": 9.881183103940527e-06, "loss": 0.5423, "step": 368 },
    { "epoch": 1.59, "learning_rate": 9.833660103061903e-06, "loss": 0.5428, "step": 369 },
    { "epoch": 1.59, "learning_rate": 9.786140859626593e-06, "loss": 0.5288, "step": 370 },
    { "epoch": 1.6, "learning_rate": 9.738626447044318e-06, "loss": 0.5138, "step": 371 },
    { "epoch": 1.6, "learning_rate": 9.691117938615679e-06, "loss": 0.5244, "step": 372 },
    { "epoch": 1.6, "learning_rate": 9.643616407507918e-06, "loss": 0.5294, "step": 373 },
    { "epoch": 1.61, "learning_rate": 9.596122926730655e-06, "loss": 0.5318, "step": 374 },
    { "epoch": 1.61, "learning_rate": 9.548638569111665e-06, "loss": 0.5053, "step": 375 },
    { "epoch": 1.62, "learning_rate": 9.501164407272643e-06, "loss": 0.5206, "step": 376 },
    { "epoch": 1.62, "learning_rate": 9.453701513604972e-06, "loss": 0.5063, "step": 377 },
    { "epoch": 1.63, "learning_rate": 9.406250960245494e-06, "loss": 0.5282, "step": 378 },
    { "epoch": 1.63, "learning_rate": 9.358813819052294e-06, "loss": 0.5103, "step": 379 },
    { "epoch": 1.63, "learning_rate": 9.311391161580507e-06, "loss": 0.5304, "step": 380 },
    { "epoch": 1.64, "learning_rate": 9.263984059058074e-06, "loss": 0.522, "step": 381 },
    { "epoch": 1.64, "learning_rate": 9.216593582361585e-06, "loss": 0.5309, "step": 382 },
    { "epoch": 1.65, "learning_rate": 9.169220801992057e-06, "loss": 0.5056, "step": 383 },
    { "epoch": 1.65, "learning_rate": 9.121866788050772e-06, "loss": 0.518, "step": 384 },
    { "epoch": 1.66, "learning_rate": 9.074532610215099e-06, "loss": 0.5185, "step": 385 },
    { "epoch": 1.66, "learning_rate": 9.027219337714324e-06, "loss": 0.5071, "step": 386 },
    { "epoch": 1.66, "learning_rate": 8.979928039305504e-06, "loss": 0.5475, "step": 387 },
    { "epoch": 1.67, "learning_rate": 8.932659783249333e-06, "loss": 0.5228, "step": 388 },
    { "epoch": 1.67, "learning_rate": 8.885415637285998e-06, "loss": 0.4934, "step": 389 },
    { "epoch": 1.68, "learning_rate": 8.838196668611058e-06, "loss": 0.5281, "step": 390 },
    { "epoch": 1.68, "learning_rate": 8.791003943851353e-06, "loss": 0.5234, "step": 391 },
    { "epoch": 1.69, "learning_rate": 8.743838529040898e-06, "loss": 0.5131, "step": 392 },
    { "epoch": 1.69, "learning_rate": 8.696701489596797e-06, "loss": 0.5246, "step": 393 },
    { "epoch": 1.69, "learning_rate": 8.649593890295196e-06, "loss": 0.5053, "step": 394 },
    { "epoch": 1.7, "learning_rate": 8.602516795247213e-06, "loss": 0.53, "step": 395 },
    { "epoch": 1.7, "learning_rate": 8.555471267874905e-06, "loss": 0.4878, "step": 396 },
    { "epoch": 1.71, "learning_rate": 8.508458370887255e-06, "loss": 0.5222, "step": 397 },
    { "epoch": 1.71, "learning_rate": 8.461479166256157e-06, "loss": 0.503, "step": 398 },
    { "epoch": 1.72, "learning_rate": 8.414534715192425e-06, "loss": 0.528, "step": 399 },
    { "epoch": 1.72, "learning_rate": 8.367626078121837e-06, "loss": 0.5078, "step": 400 },
    { "epoch": 1.72, "learning_rate": 8.320754314661158e-06, "loss": 0.5311, "step": 401 },
    { "epoch": 1.73, "learning_rate": 8.273920483594225e-06, "loss": 0.5151, "step": 402 },
    { "epoch": 1.73, "learning_rate": 8.227125642848023e-06, "loss": 0.5302, "step": 403 },
    { "epoch": 1.74, "learning_rate": 8.180370849468784e-06, "loss": 0.4945, "step": 404 },
    { "epoch": 1.74, "learning_rate": 8.133657159598108e-06, "loss": 0.5216, "step": 405 },
    { "epoch": 1.75, "learning_rate": 8.086985628449119e-06, "loss": 0.5356, "step": 406 },
    { "epoch": 1.75, "learning_rate": 8.040357310282615e-06, "loss": 0.5077, "step": 407 },
    { "epoch": 1.75, "learning_rate": 7.993773258383251e-06, "loss": 0.5098, "step": 408 },
    { "epoch": 1.76, "learning_rate": 7.947234525035772e-06, "loss": 0.5175, "step": 409 },
    { "epoch": 1.76, "learning_rate": 7.900742161501204e-06, "loss": 0.4935, "step": 410 },
    { "epoch": 1.77, "learning_rate": 7.854297217993138e-06, "loss": 0.5103, "step": 411 },
    { "epoch": 1.77, "learning_rate": 7.807900743654003e-06, "loss": 0.5116, "step": 412 },
    { "epoch": 1.78, "learning_rate": 7.761553786531346e-06, "loss": 0.5077, "step": 413 },
    { "epoch": 1.78, "learning_rate": 7.71525739355418e-06, "loss": 0.4915, "step": 414 },
    { "epoch": 1.78, "learning_rate": 7.669012610509334e-06, "loss": 0.4992, "step": 415 },
    { "epoch": 1.79, "learning_rate": 7.622820482017804e-06, "loss": 0.4955, "step": 416 },
    { "epoch": 1.79, "learning_rate": 7.57668205151119e-06, "loss": 0.5147, "step": 417 },
    { "epoch": 1.8, "learning_rate": 7.53059836120811e-06, "loss": 0.5267, "step": 418 },
    { "epoch": 1.8, "learning_rate": 7.484570452090655e-06, "loss": 0.5032, "step": 419 },
    { "epoch": 1.81, "learning_rate": 7.438599363880874e-06, "loss": 0.5167, "step": 420 },
    { "epoch": 1.81, "learning_rate": 7.392686135017302e-06, "loss": 0.525, "step": 421 },
    { "epoch": 1.82, "learning_rate": 7.346831802631486e-06, "loss": 0.5262, "step": 422 },
    { "epoch": 1.82, "learning_rate": 7.301037402524556e-06, "loss": 0.4958, "step": 423 },
    { "epoch": 1.82, "learning_rate": 7.2553039691438475e-06, "loss": 0.5097, "step": 424 },
    { "epoch": 1.83, "learning_rate": 7.209632535559519e-06, "loss": 0.5238, "step": 425 },
    { "epoch": 1.83, "learning_rate": 7.16402413344121e-06, "loss": 0.5069, "step": 426 },
    { "epoch": 1.84, "learning_rate": 7.118479793034758e-06, "loss": 0.497, "step": 427 },
    { "epoch": 1.84, "learning_rate": 7.073000543138904e-06, "loss": 0.5294, "step": 428 },
    { "epoch": 1.85, "learning_rate": 7.027587411082069e-06, "loss": 0.501, "step": 429 },
    { "epoch": 1.85, "learning_rate": 6.9822414226991405e-06, "loss": 0.5157, "step": 430 },
    { "epoch": 1.85, "learning_rate": 6.936963602308297e-06, "loss": 0.5141, "step": 431 },
    { "epoch": 1.86, "learning_rate": 6.891754972687873e-06, "loss": 0.5122, "step": 432 },
    { "epoch": 1.86, "learning_rate": 6.846616555053266e-06, "loss": 0.5152, "step": 433 },
    { "epoch": 1.87, "learning_rate": 6.801549369033851e-06, "loss": 0.5131, "step": 434 },
    { "epoch": 1.87, "learning_rate": 6.7565544326499535e-06, "loss": 0.5311, "step": 435 },
    { "epoch": 1.88, "learning_rate": 6.7116327622898634e-06, "loss": 0.5177, "step": 436 },
    { "epoch": 1.88, "learning_rate": 6.666785372686863e-06, "loss": 0.5123, "step": 437 },
    { "epoch": 1.88, "learning_rate": 6.62201327689631e-06, "loss": 0.4966, "step": 438 },
    { "epoch": 1.89, "learning_rate": 6.577317486272756e-06, "loss": 0.5268, "step": 439 },
    { "epoch": 1.89, "learning_rate": 6.532699010447101e-06, "loss": 0.5075, "step": 440 },
    { "epoch": 1.9, "learning_rate": 6.488158857303779e-06, "loss": 0.5256, "step": 441 },
    { "epoch": 1.9, "learning_rate": 6.443698032958003e-06, "loss": 0.5096, "step": 442 },
    { "epoch": 1.91, "learning_rate": 6.39931754173303e-06, "loss": 0.5072, "step": 443 },
    { "epoch": 1.91, "learning_rate": 6.355018386137475e-06, "loss": 0.4937, "step": 444 },
    { "epoch": 1.91, "learning_rate": 6.310801566842671e-06, "loss": 0.5039, "step": 445 },
    { "epoch": 1.92, "learning_rate": 6.266668082660053e-06, "loss": 0.5048, "step": 446 },
    { "epoch": 1.92, "learning_rate": 6.222618930518605e-06, "loss": 0.5063, "step": 447 },
    { "epoch": 1.93, "learning_rate": 6.1786551054423485e-06, "loss": 0.521, "step": 448 },
    { "epoch": 1.93, "learning_rate": 6.134777600527846e-06, "loss": 0.5088, "step": 449 },
    { "epoch": 1.94, "learning_rate": 6.09098740692178e-06, "loss": 0.4949, "step": 450 },
    { "epoch": 1.94, "learning_rate": 6.047285513798569e-06, "loss": 0.4994, "step": 451 },
    { "epoch": 1.94, "learning_rate": 6.0036729083380095e-06, "loss": 0.5304, "step": 452 },
    { "epoch": 1.95, "learning_rate": 5.960150575702981e-06, "loss": 0.4975, "step": 453 },
    { "epoch": 1.95, "learning_rate": 5.916719499017206e-06, "loss": 0.5023, "step": 454 },
    { "epoch": 1.96, "learning_rate": 5.873380659343022e-06, "loss": 0.5051, "step": 455 },
    { "epoch": 1.96, "learning_rate": 5.83013503565923e-06, "loss": 0.5074, "step": 456 },
    { "epoch": 1.97, "learning_rate": 5.786983604838984e-06, "loss": 0.5042, "step": 457 },
    { "epoch": 1.97, "learning_rate": 5.743927341627722e-06, "loss": 0.515, "step": 458 },
    { "epoch": 1.97, "learning_rate": 5.700967218621134e-06, "loss": 0.5167, "step": 459 },
    { "epoch": 1.98, "learning_rate": 5.658104206243222e-06, "loss": 0.515, "step": 460 },
    { "epoch": 1.98, "learning_rate": 5.615339272724338e-06, "loss": 0.4989, "step": 461 },
    { "epoch": 1.99, "learning_rate": 5.572673384079361e-06, "loss": 0.4998, "step": 462 },
    { "epoch": 1.99, "learning_rate": 5.53010750408583e-06, "loss": 0.5054, "step": 463 },
    { "epoch": 2.0, "learning_rate": 5.487642594262203e-06, "loss": 0.4986, "step": 464 },
    { "epoch": 2.0, "learning_rate": 5.445279613846132e-06, "loss": 0.4758, "step": 465 },
    { "epoch": 2.0, "learning_rate": 5.403019519772781e-06, "loss": 0.4857, "step": 466 },
    { "epoch": 2.01, "learning_rate": 5.360863266653228e-06, "loss": 0.4618, "step": 467 },
    { "epoch": 2.01, "learning_rate": 5.318811806752884e-06, "loss": 0.4636, "step": 468 },
    { "epoch": 2.02, "learning_rate": 5.2768660899700056e-06, "loss": 0.4645, "step": 469 },
    { "epoch": 2.02, "learning_rate": 5.235027063814206e-06, "loss": 0.4767, "step": 470 },
    { "epoch": 2.03, "learning_rate": 5.193295673385081e-06, "loss": 0.4747, "step": 471 },
    { "epoch": 2.03, "learning_rate": 5.151672861350849e-06, "loss": 0.4849, "step": 472 },
    { "epoch": 2.03, "learning_rate": 5.1101595679270566e-06, "loss": 0.451, "step": 473 },
    { "epoch": 2.04, "learning_rate": 5.068756730855329e-06, "loss": 0.463, "step": 474 },
    { "epoch": 2.04, "learning_rate": 5.027465285382213e-06, "loss": 0.4643, "step": 475 },
    { "epoch": 2.05, "learning_rate": 4.986286164238026e-06, "loss": 0.4529, "step": 476 },
    { "epoch": 2.05, "learning_rate": 4.945220297615806e-06, "loss": 0.4546, "step": 477 },
    { "epoch": 2.06, "learning_rate": 4.904268613150278e-06, "loss": 0.4704, "step": 478 },
    { "epoch": 2.06, "learning_rate": 4.863432035896924e-06, "loss": 0.4701, "step": 479 },
    { "epoch": 2.06, "learning_rate": 4.822711488311077e-06, "loss": 0.459, "step": 480 },
    { "epoch": 2.07, "learning_rate": 4.782107890227066e-06, "loss": 0.4901, "step": 481 },
    { "epoch": 2.07, "learning_rate": 4.74162215883747e-06, "loss": 0.4505, "step": 482 },
    { "epoch": 2.08, "learning_rate": 4.701255208672372e-06, "loss": 0.4671, "step": 483 },
    { "epoch": 2.08, "learning_rate": 4.661007951578722e-06, "loss": 0.46, "step": 484 },
    { "epoch": 2.09, "learning_rate": 4.620881296699719e-06, "loss": 0.4434, "step": 485 },
    { "epoch": 2.09, "learning_rate": 4.580876150454292e-06, "loss": 0.4759, "step": 486 },
    { "epoch": 2.09, "learning_rate": 4.540993416516618e-06, "loss": 0.4826, "step": 487 },
    { "epoch": 2.1, "learning_rate": 4.501233995795709e-06, "loss": 0.4427, "step": 488 },
    { "epoch": 2.1, "learning_rate": 4.461598786415052e-06, "loss": 0.4797, "step": 489 },
    { "epoch": 2.11, "learning_rate": 4.4220886836923445e-06, "loss": 0.468, "step": 490 },
    { "epoch": 2.11, "learning_rate": 4.382704580119242e-06, "loss": 0.4662, "step": 491 },
    { "epoch": 2.12, "learning_rate": 4.343447365341226e-06, "loss": 0.4662, "step": 492 },
    { "epoch": 2.12, "learning_rate": 4.304317926137478e-06, "loss": 0.4646, "step": 493 },
    { "epoch": 2.12, "learning_rate": 4.265317146400877e-06, "loss": 0.4503, "step": 494 },
    { "epoch": 2.13, "learning_rate": 4.2264459071180184e-06, "loss": 0.4462, "step": 495 },
    { "epoch": 2.13, "learning_rate": 4.187705086349304e-06, "loss": 0.4583, "step": 496 },
    { "epoch": 2.14, "learning_rate": 4.149095559209133e-06, "loss": 0.4626, "step": 497 },
    { "epoch": 2.14, "learning_rate": 4.110618197846105e-06, "loss": 0.4765, "step": 498 },
    { "epoch": 2.15, "learning_rate": 4.072273871423348e-06, "loss": 0.4756, "step": 499 },
    { "epoch": 2.15, "learning_rate": 4.034063446098864e-06, "loss": 0.4585, "step": 500 },
    { "epoch": 2.15, "learning_rate": 3.995987785005966e-06, "loss": 0.4522, "step": 501 },
    { "epoch": 2.16, "learning_rate": 3.95804774823379e-06, "loss": 0.483, "step": 502 },
    { "epoch": 2.16, "learning_rate": 3.920244192807865e-06, "loss": 0.4611, "step": 503 },
    { "epoch": 2.17, "learning_rate": 3.882577972670737e-06, "loss": 0.4765, "step": 504 },
    { "epoch": 2.17, "learning_rate": 3.845049938662709e-06, "loss": 0.4593, "step": 505 },
    { "epoch": 2.18, "learning_rate": 3.8076609385025886e-06, "loss": 0.4595, "step": 506 },
    { "epoch": 2.18, "learning_rate": 3.7704118167685676e-06, "loss": 0.4544, "step": 507 },
    { "epoch": 2.18, "learning_rate": 3.7333034148791213e-06, "loss": 0.4599, "step": 508 },
    { "epoch": 2.19, "learning_rate": 3.6963365710740195e-06, "loss": 0.4395, "step": 509 },
    { "epoch": 2.19, "learning_rate": 3.659512120395384e-06, "loss": 0.4537, "step": 510 },
    { "epoch": 2.2, "learning_rate": 3.6228308946688163e-06, "loss": 0.4688, "step": 511 },
    { "epoch": 2.2, "learning_rate": 3.5862937224846284e-06, "loss": 0.4473, "step": 512 },
    { "epoch": 2.21, "learning_rate": 3.5499014291791035e-06, "loss": 0.471, "step": 513 },
    { "epoch": 2.21, "learning_rate": 3.513654836815872e-06, "loss": 0.4565, "step": 514 },
    { "epoch": 2.22, "learning_rate": 3.4775547641673337e-06, "loss": 0.4836, "step": 515 },
    { "epoch": 2.22, "learning_rate": 3.4416020266961503e-06, "loss": 0.4517, "step": 516 },
    { "epoch": 2.22, "learning_rate": 3.4057974365368496e-06, "loss": 0.4636, "step": 517 },
    { "epoch": 2.23, "learning_rate": 3.3701418024774656e-06, "loss": 0.4647, "step": 518 },
    { "epoch": 2.23, "learning_rate": 3.3346359299412624e-06, "loss": 0.47, "step": 519 },
    { "epoch": 2.24, "learning_rate": 3.29928062096856e-06, "loss": 0.46, "step": 520 },
    { "epoch": 2.24, "learning_rate": 3.264076674198594e-06, "loss": 0.4835, "step": 521 },
    { "epoch": 2.25, "learning_rate": 3.2290248848514993e-06, "loss": 0.445, "step": 522 },
    { "epoch": 2.25, "learning_rate": 3.1941260447103228e-06, "loss": 0.4665, "step": 523 },
    { "epoch": 2.25, "learning_rate": 3.1593809421031583e-06, "loss": 0.4562, "step": 524 },
    { "epoch": 2.26, "learning_rate": 3.1247903618853326e-06, "loss": 0.4582, "step": 525 },
    { "epoch": 2.26, "learning_rate": 3.0903550854216603e-06, "loss": 0.4762, "step": 526 },
    { "epoch": 2.27, "learning_rate": 3.056075890568825e-06, "loss": 0.4547, "step": 527 },
    { "epoch": 2.27, "learning_rate": 3.021953551657774e-06, "loss": 0.455, "step": 528 },
    { "epoch": 2.28, "learning_rate": 2.987988839476258e-06, "loss": 0.4503, "step": 529 },
    { "epoch": 2.28, "learning_rate": 2.9541825212514007e-06, "loss": 0.4707, "step": 530 },
    { "epoch": 2.28, "learning_rate": 2.9205353606323683e-06, "loss": 0.4766, "step": 531 },
    { "epoch": 2.29, "learning_rate": 2.887048117673129e-06, "loss": 0.4504, "step": 532 },
    { "epoch": 2.29, "learning_rate": 2.8537215488152837e-06, "loss": 0.4499, "step": 533 },
    { "epoch": 2.3, "learning_rate": 2.82055640687096e-06, "loss": 0.4515, "step": 534 },
    { "epoch": 2.3, "learning_rate": 2.7875534410058392e-06, "loss": 0.4629, "step": 535 },
    { "epoch": 2.31, "learning_rate": 2.7547133967221985e-06, "loss": 0.4722, "step": 536 },
    { "epoch": 2.31, "learning_rate": 2.722037015842103e-06, "loss": 0.4527, "step": 537 },
    { "epoch": 2.31,
"learning_rate": 2.68952503649062e-06, |
|
"loss": 0.4759, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.6571781930791706e-06, |
|
"loss": 0.4682, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.624997216288925e-06, |
|
"loss": 0.454, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.592982833054294e-06, |
|
"loss": 0.4675, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5611357665465254e-06, |
|
"loss": 0.4704, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.529456736157356e-06, |
|
"loss": 0.4659, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.497946457482756e-06, |
|
"loss": 0.48, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4666056423067832e-06, |
|
"loss": 0.4611, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.43543499858548e-06, |
|
"loss": 0.4457, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.404435230430905e-06, |
|
"loss": 0.4351, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.373607038095217e-06, |
|
"loss": 0.4495, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3429511179548504e-06, |
|
"loss": 0.4407, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.312468162494801e-06, |
|
"loss": 0.4444, |
|
"step": 550 |
|
}, |
|
    {
      "epoch": 2.37,
      "learning_rate": 2.2821588602929635e-06,
      "loss": 0.4464,
      "step": 551
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.2520238960046016e-06,
      "loss": 0.4663,
      "step": 552
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.2220639503468542e-06,
      "loss": 0.4665,
      "step": 553
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.192279700083385e-06,
      "loss": 0.4499,
      "step": 554
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.1626718180090824e-06,
      "loss": 0.4459,
      "step": 555
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.1332409729348526e-06,
      "loss": 0.4595,
      "step": 556
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.1039878296725335e-06,
      "loss": 0.4736,
      "step": 557
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.0749130490198655e-06,
      "loss": 0.4537,
      "step": 558
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.0460172877455543e-06,
      "loss": 0.4678,
      "step": 559
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.0173011985744608e-06,
      "loss": 0.45,
      "step": 560
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.9887654301728287e-06,
      "loss": 0.4612,
      "step": 561
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.960410627133654e-06,
      "loss": 0.4356,
      "step": 562
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.932237429962116e-06,
      "loss": 0.4603,
      "step": 563
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.9042464750610989e-06,
      "loss": 0.466,
      "step": 564
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8764383947168386e-06,
      "loss": 0.4742,
      "step": 565
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8488138170846148e-06,
      "loss": 0.4536,
      "step": 566
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.8213733661745858e-06,
      "loss": 0.4578,
      "step": 567
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.7941176618376687e-06,
      "loss": 0.4605,
      "step": 568
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7670473197515592e-06,
      "loss": 0.442,
      "step": 569
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.740162951406812e-06,
      "loss": 0.4615,
      "step": 570
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.7134651640930311e-06,
      "loss": 0.4639,
      "step": 571
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6869545608851467e-06,
      "loss": 0.4669,
      "step": 572
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6606317406298045e-06,
      "loss": 0.4464,
      "step": 573
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.634497297931823e-06,
      "loss": 0.4569,
      "step": 574
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.6085518231407781e-06,
      "loss": 0.4656,
      "step": 575
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5827959023376505e-06,
      "loss": 0.4674,
      "step": 576
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.557230117321602e-06,
      "loss": 0.4457,
      "step": 577
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.5318550455968285e-06,
      "loss": 0.4582,
      "step": 578
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.5066712603595045e-06,
      "loss": 0.4576,
      "step": 579
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.4816793304848543e-06,
      "loss": 0.4612,
      "step": 580
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.456879820514282e-06,
      "loss": 0.4644,
      "step": 581
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.4322732906426362e-06,
      "loss": 0.4727,
      "step": 582
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.407860296705542e-06,
      "loss": 0.465,
      "step": 583
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.3836413901668566e-06,
      "loss": 0.4584,
      "step": 584
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.3596171181062024e-06,
      "loss": 0.4485,
      "step": 585
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.335788023206619e-06,
      "loss": 0.4651,
      "step": 586
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.3121546437422916e-06,
      "loss": 0.4701,
      "step": 587
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2887175135664088e-06,
      "loss": 0.4623,
      "step": 588
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2654771620990846e-06,
      "loss": 0.4527,
      "step": 589
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2424341143154173e-06,
      "loss": 0.4668,
      "step": 590
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2195888907336162e-06,
      "loss": 0.44,
      "step": 591
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1969420074032535e-06,
      "loss": 0.4495,
      "step": 592
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1744939758936047e-06,
      "loss": 0.4632,
      "step": 593
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1522453032820868e-06,
      "loss": 0.4676,
      "step": 594
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.1301964921428165e-06,
      "loss": 0.4519,
      "step": 595
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.108348040535242e-06,
      "loss": 0.4576,
      "step": 596
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0867004419929061e-06,
      "loss": 0.4588,
      "step": 597
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0652541855122888e-06,
      "loss": 0.4522,
      "step": 598
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.044009755541766e-06,
      "loss": 0.457,
      "step": 599
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0229676319706671e-06,
      "loss": 0.4461,
      "step": 600
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0021282901184315e-06,
      "loss": 0.4458,
      "step": 601
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.814922007238692e-07,
      "loss": 0.4668,
      "step": 602
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.610598299345364e-07,
      "loss": 0.4501,
      "step": 603
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.408316392961947e-07,
      "loss": 0.4722,
      "step": 604
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.208080857423985e-07,
      "loss": 0.4508,
      "step": 605
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.009896215841562e-07,
      "loss": 0.4447,
      "step": 606
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.813766944997293e-07,
      "loss": 0.4468,
      "step": 607
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.619697475245136e-07,
      "loss": 0.4481,
      "step": 608
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.427692190410252e-07,
      "loss": 0.4752,
      "step": 609
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.237755427690097e-07,
      "loss": 0.4443,
      "step": 610
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.049891477556326e-07,
      "loss": 0.4465,
      "step": 611
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.864104583657995e-07,
      "loss": 0.4449,
      "step": 612
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.680398942725609e-07,
      "loss": 0.4734,
      "step": 613
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.498778704476373e-07,
      "loss": 0.4624,
      "step": 614
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.319247971520427e-07,
      "loss": 0.446,
      "step": 615
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.141810799268222e-07,
      "loss": 0.4671,
      "step": 616
    },
    {
      "epoch": 2.65,
      "learning_rate": 6.966471195838808e-07,
      "loss": 0.4666,
      "step": 617
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.793233121969422e-07,
      "loss": 0.4609,
      "step": 618
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.62210049092592e-07,
      "loss": 0.446,
      "step": 619
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.453077168414456e-07,
      "loss": 0.4537,
      "step": 620
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.286166972494079e-07,
      "loss": 0.4599,
      "step": 621
    },
    {
      "epoch": 2.68,
      "learning_rate": 6.121373673490549e-07,
      "loss": 0.4911,
      "step": 622
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.958700993911193e-07,
      "loss": 0.4722,
      "step": 623
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.798152608360696e-07,
      "loss": 0.4431,
      "step": 624
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.639732143458254e-07,
      "loss": 0.4388,
      "step": 625
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.483443177755499e-07,
      "loss": 0.4504,
      "step": 626
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.329289241655811e-07,
      "loss": 0.4663,
      "step": 627
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.177273817334439e-07,
      "loss": 0.4538,
      "step": 628
    },
    {
      "epoch": 2.71,
      "learning_rate": 5.027400338659926e-07,
      "loss": 0.4784,
      "step": 629
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.879672191116524e-07,
      "loss": 0.462,
      "step": 630
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.734092711727711e-07,
      "loss": 0.4539,
      "step": 631
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.5906651889807697e-07,
      "loss": 0.4435,
      "step": 632
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.4493928627526105e-07,
      "loss": 0.47,
      "step": 633
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.3102789242364553e-07,
      "loss": 0.4485,
      "step": 634
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.17332651586988e-07,
      "loss": 0.4702,
      "step": 635
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.0385387312637194e-07,
      "loss": 0.4593,
      "step": 636
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.9059186151322534e-07,
      "loss": 0.4618,
      "step": 637
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.7754691632244323e-07,
      "loss": 0.4635,
      "step": 638
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.6471933222561375e-07,
      "loss": 0.474,
      "step": 639
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.521093989843716e-07,
      "loss": 0.4544,
      "step": 640
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.397174014438431e-07,
      "loss": 0.4794,
      "step": 641
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.2754361952621936e-07,
      "loss": 0.4599,
      "step": 642
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.1558832822442874e-07,
      "loss": 0.45,
      "step": 643
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.0385179759592767e-07,
      "loss": 0.4594,
      "step": 644
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.9233429275659643e-07,
      "loss": 0.4653,
      "step": 645
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.8103607387475753e-07,
      "loss": 0.4578,
      "step": 646
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.6995739616528905e-07,
      "loss": 0.4616,
      "step": 647
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.5909850988386943e-07,
      "loss": 0.4624,
      "step": 648
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.484596603213163e-07,
      "loss": 0.4433,
      "step": 649
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.3804108779805325e-07,
      "loss": 0.4527,
      "step": 650
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.2784302765867294e-07,
      "loss": 0.4542,
      "step": 651
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.1786571026662705e-07,
      "loss": 0.4598,
      "step": 652
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.0810936099902368e-07,
      "loss": 0.4448,
      "step": 653
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.9857420024152808e-07,
      "loss": 0.4591,
      "step": 654
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.8926044338339332e-07,
      "loss": 0.4621,
      "step": 655
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.8016830081259095e-07,
      "loss": 0.458,
      "step": 656
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.7129797791105686e-07,
      "loss": 0.4634,
      "step": 657
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.6264967505005613e-07,
      "loss": 0.464,
      "step": 658
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.5422358758565348e-07,
      "loss": 0.4584,
      "step": 659
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.4601990585430214e-07,
      "loss": 0.4653,
      "step": 660
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.3803881516854412e-07,
      "loss": 0.4502,
      "step": 661
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.3028049581282231e-07,
      "loss": 0.4696,
      "step": 662
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.2274512303941165e-07,
      "loss": 0.4537,
      "step": 663
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.1543286706445556e-07,
      "loss": 0.4587,
      "step": 664
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.0834389306412674e-07,
      "loss": 0.4499,
      "step": 665
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.0147836117088916e-07,
      "loss": 0.4561,
      "step": 666
    },
    {
      "epoch": 2.87,
      "learning_rate": 9.483642646988978e-08,
      "loss": 0.4674,
      "step": 667
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.841823899544577e-08,
      "loss": 0.4561,
      "step": 668
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.222394372766173e-08,
      "loss": 0.4509,
      "step": 669
    },
    {
      "epoch": 2.88,
      "learning_rate": 7.625368058915228e-08,
      "loss": 0.4643,
      "step": 670
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.050758444188122e-08,
      "loss": 0.4575,
      "step": 671
    },
    {
      "epoch": 2.89,
      "learning_rate": 6.498578508411734e-08,
      "loss": 0.4636,
      "step": 672
    },
    {
      "epoch": 2.89,
      "learning_rate": 5.968840724750013e-08,
      "loss": 0.4636,
      "step": 673
    },
    {
      "epoch": 2.9,
      "learning_rate": 5.4615570594223066e-08,
      "loss": 0.4661,
      "step": 674
    },
    {
      "epoch": 2.9,
      "learning_rate": 4.976738971433026e-08,
      "loss": 0.4538,
      "step": 675
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.5143974123129655e-08,
      "loss": 0.4651,
      "step": 676
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.074542825871275e-08,
      "loss": 0.4544,
      "step": 677
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.6571851479607624e-08,
      "loss": 0.4608,
      "step": 678
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.262333806252294e-08,
      "loss": 0.4706,
      "step": 679
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.889997720022297e-08,
      "loss": 0.4496,
      "step": 680
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.540185299951259e-08,
      "loss": 0.4655,
      "step": 681
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.2129044479339833e-08,
      "loss": 0.4512,
      "step": 682
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.908162556900628e-08,
      "loss": 0.453,
      "step": 683
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.6259665106498346e-08,
      "loss": 0.4491,
      "step": 684
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.3663226836936327e-08,
      "loss": 0.4554,
      "step": 685
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.1292369411127768e-08,
      "loss": 0.4531,
      "step": 686
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.147146384250739e-09,
      "loss": 0.4548,
      "step": 687
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.227606214635918e-09,
      "loss": 0.4687,
      "step": 688
    },
    {
      "epoch": 2.96,
      "learning_rate": 5.5337922626752526e-09,
      "loss": 0.4641,
      "step": 689
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.065742789846061e-09,
      "loss": 0.4636,
      "step": 690
    },
    {
      "epoch": 2.97,
      "learning_rate": 2.8234909578417347e-09,
      "loss": 0.4611,
      "step": 691
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.807064827823446e-09,
      "loss": 0.4629,
      "step": 692
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.016487359789542e-09,
      "loss": 0.4562,
      "step": 693
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.5177641205262915e-10,
      "loss": 0.4709,
      "step": 694
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.1294474083878293e-10,
      "loss": 0.4708,
      "step": 695
    },
    {
      "epoch": 2.99,
      "learning_rate": 0.0,
      "loss": 0.443,
      "step": 696
    },
    {
      "epoch": 2.99,
      "step": 696,
      "total_flos": 1196603910258688.0,
      "train_loss": 0.6090522945012854,
      "train_runtime": 52554.8084,
      "train_samples_per_second": 1.698,
      "train_steps_per_second": 0.013
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 696,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 400,
  "total_flos": 1196603910258688.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}