|
{ |
|
"best_metric": 1.4266291856765747, |
|
"best_model_checkpoint": "/home/co-ou1/rds/hpc-work/models/bart_large_gov/checkpoint-1227", |
|
"epoch": 19.935868071461293, |
|
"eval_steps": 500, |
|
"global_step": 2720, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.996323529411765e-05, |
|
"loss": 3.1458, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.99264705882353e-05, |
|
"loss": 2.5311, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9889705882352945e-05, |
|
"loss": 2.3371, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.985294117647059e-05, |
|
"loss": 2.119, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9816176470588236e-05, |
|
"loss": 2.0765, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.977941176470589e-05, |
|
"loss": 1.9836, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9742647058823534e-05, |
|
"loss": 2.0458, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.970588235294118e-05, |
|
"loss": 1.9844, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.9669117647058824e-05, |
|
"loss": 1.9514, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9632352941176476e-05, |
|
"loss": 1.8552, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.959558823529412e-05, |
|
"loss": 1.8923, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.955882352941177e-05, |
|
"loss": 1.8408, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.952205882352941e-05, |
|
"loss": 1.8763, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9485294117647065e-05, |
|
"loss": 1.8369, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.944852941176471e-05, |
|
"loss": 1.8329, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9411764705882355e-05, |
|
"loss": 1.865, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.937500000000001e-05, |
|
"loss": 1.8123, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.933823529411765e-05, |
|
"loss": 1.8076, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.93014705882353e-05, |
|
"loss": 1.8127, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.9264705882352944e-05, |
|
"loss": 1.7886, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9227941176470596e-05, |
|
"loss": 1.7742, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9191176470588234e-05, |
|
"loss": 1.8307, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.915441176470588e-05, |
|
"loss": 1.8065, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.911764705882353e-05, |
|
"loss": 1.8092, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.908088235294118e-05, |
|
"loss": 1.7918, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.904411764705882e-05, |
|
"loss": 1.7952, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.900735294117647e-05, |
|
"loss": 1.7627, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.897058823529412e-05, |
|
"loss": 1.745, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.8933823529411766e-05, |
|
"loss": 1.7852, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.889705882352941e-05, |
|
"loss": 1.7929, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.886029411764706e-05, |
|
"loss": 1.7667, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.882352941176471e-05, |
|
"loss": 1.7801, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.8786764705882354e-05, |
|
"loss": 1.7506, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.875e-05, |
|
"loss": 1.7355, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.871323529411765e-05, |
|
"loss": 1.7368, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.86764705882353e-05, |
|
"loss": 1.7198, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.863970588235294e-05, |
|
"loss": 1.732, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.860294117647059e-05, |
|
"loss": 1.7899, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.856617647058824e-05, |
|
"loss": 1.7788, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.8529411764705885e-05, |
|
"loss": 1.7476, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.849264705882353e-05, |
|
"loss": 1.7499, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.8455882352941176e-05, |
|
"loss": 1.7246, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.841911764705883e-05, |
|
"loss": 1.7551, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.838235294117647e-05, |
|
"loss": 1.7388, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.834558823529412e-05, |
|
"loss": 1.7604, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.830882352941177e-05, |
|
"loss": 1.6976, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.8272058823529416e-05, |
|
"loss": 1.7179, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.823529411764706e-05, |
|
"loss": 1.6908, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.819852941176471e-05, |
|
"loss": 1.7271, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.816176470588236e-05, |
|
"loss": 1.7638, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.8125000000000004e-05, |
|
"loss": 1.7888, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.808823529411765e-05, |
|
"loss": 1.7234, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.8051470588235295e-05, |
|
"loss": 1.6952, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.801470588235295e-05, |
|
"loss": 1.6991, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.797794117647059e-05, |
|
"loss": 1.7197, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.794117647058824e-05, |
|
"loss": 1.721, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.790441176470588e-05, |
|
"loss": 1.6757, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.7867647058823535e-05, |
|
"loss": 1.6877, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.783088235294118e-05, |
|
"loss": 1.6713, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.7794117647058826e-05, |
|
"loss": 1.6566, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.775735294117648e-05, |
|
"loss": 1.7646, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.7720588235294124e-05, |
|
"loss": 1.698, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.768382352941177e-05, |
|
"loss": 1.6811, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.7647058823529414e-05, |
|
"loss": 1.6475, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.761029411764706e-05, |
|
"loss": 1.7184, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.7573529411764705e-05, |
|
"loss": 1.6903, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.753676470588235e-05, |
|
"loss": 1.696, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.75e-05, |
|
"loss": 1.7352, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_gen_len": 685.9300411522634, |
|
"eval_loss": 1.5224188566207886, |
|
"eval_rouge1": 72.0472, |
|
"eval_rouge2": 41.3267, |
|
"eval_rougeL": 36.4817, |
|
"eval_rougeLsum": 69.4011, |
|
"eval_runtime": 1507.0932, |
|
"eval_samples_per_second": 0.645, |
|
"eval_steps_per_second": 0.081, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.746323529411765e-05, |
|
"loss": 1.6969, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.742647058823529e-05, |
|
"loss": 1.6445, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.738970588235294e-05, |
|
"loss": 1.6552, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.735294117647059e-05, |
|
"loss": 1.6619, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.7316176470588236e-05, |
|
"loss": 1.6811, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.727941176470588e-05, |
|
"loss": 1.6565, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.7242647058823534e-05, |
|
"loss": 1.6777, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.720588235294118e-05, |
|
"loss": 1.6993, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.7169117647058824e-05, |
|
"loss": 1.6585, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.713235294117647e-05, |
|
"loss": 1.6274, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.709558823529412e-05, |
|
"loss": 1.6659, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.705882352941177e-05, |
|
"loss": 1.6835, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.702205882352941e-05, |
|
"loss": 1.6414, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.698529411764706e-05, |
|
"loss": 1.6614, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.694852941176471e-05, |
|
"loss": 1.6693, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.6911764705882356e-05, |
|
"loss": 1.6631, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.6875e-05, |
|
"loss": 1.6745, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.6838235294117646e-05, |
|
"loss": 1.642, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.68014705882353e-05, |
|
"loss": 1.6089, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.6764705882352944e-05, |
|
"loss": 1.6192, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.672794117647059e-05, |
|
"loss": 1.5909, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.669117647058824e-05, |
|
"loss": 1.679, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.665441176470589e-05, |
|
"loss": 1.6633, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.661764705882353e-05, |
|
"loss": 1.6667, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.658088235294118e-05, |
|
"loss": 1.5912, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.654411764705883e-05, |
|
"loss": 1.6803, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.6507352941176475e-05, |
|
"loss": 1.5855, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.647058823529412e-05, |
|
"loss": 1.6265, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.6433823529411766e-05, |
|
"loss": 1.6296, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.639705882352942e-05, |
|
"loss": 1.6503, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.636029411764706e-05, |
|
"loss": 1.6359, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.632352941176471e-05, |
|
"loss": 1.5901, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.6286764705882354e-05, |
|
"loss": 1.6452, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.6250000000000006e-05, |
|
"loss": 1.6119, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.621323529411765e-05, |
|
"loss": 1.6387, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.61764705882353e-05, |
|
"loss": 1.6565, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.613970588235295e-05, |
|
"loss": 1.6407, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6102941176470594e-05, |
|
"loss": 1.5864, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.606617647058823e-05, |
|
"loss": 1.5997, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.6029411764705885e-05, |
|
"loss": 1.6041, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.599264705882353e-05, |
|
"loss": 1.6505, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.5955882352941176e-05, |
|
"loss": 1.6734, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.591911764705882e-05, |
|
"loss": 1.5972, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.588235294117647e-05, |
|
"loss": 1.6595, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.584558823529412e-05, |
|
"loss": 1.5991, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.5808823529411764e-05, |
|
"loss": 1.6267, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.577205882352941e-05, |
|
"loss": 1.6275, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.573529411764706e-05, |
|
"loss": 1.6901, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.569852941176471e-05, |
|
"loss": 1.635, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.566176470588235e-05, |
|
"loss": 1.6775, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.5625e-05, |
|
"loss": 1.6446, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.558823529411765e-05, |
|
"loss": 1.6315, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.5551470588235295e-05, |
|
"loss": 1.6885, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.551470588235294e-05, |
|
"loss": 1.6688, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.547794117647059e-05, |
|
"loss": 1.6464, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.544117647058824e-05, |
|
"loss": 1.6047, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.5404411764705883e-05, |
|
"loss": 1.6206, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.536764705882353e-05, |
|
"loss": 1.6146, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.533088235294118e-05, |
|
"loss": 1.6104, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.5294117647058826e-05, |
|
"loss": 1.618, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.525735294117647e-05, |
|
"loss": 1.6014, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.522058823529412e-05, |
|
"loss": 1.6227, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.518382352941177e-05, |
|
"loss": 1.5916, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.5147058823529415e-05, |
|
"loss": 1.6464, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.511029411764706e-05, |
|
"loss": 1.6178, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.507352941176471e-05, |
|
"loss": 1.6777, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.503676470588236e-05, |
|
"loss": 1.6599, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.5e-05, |
|
"loss": 1.6874, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"eval_gen_len": 699.4866255144033, |
|
"eval_loss": 1.4778804779052734, |
|
"eval_rouge1": 71.7737, |
|
"eval_rouge2": 40.8546, |
|
"eval_rougeL": 36.8472, |
|
"eval_rougeLsum": 69.2034, |
|
"eval_runtime": 1548.1543, |
|
"eval_samples_per_second": 0.628, |
|
"eval_steps_per_second": 0.079, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.496323529411765e-05, |
|
"loss": 1.5938, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.49264705882353e-05, |
|
"loss": 1.6166, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.4889705882352946e-05, |
|
"loss": 1.5896, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.485294117647059e-05, |
|
"loss": 1.5819, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.4816176470588236e-05, |
|
"loss": 1.5363, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.477941176470589e-05, |
|
"loss": 1.6237, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.4742647058823534e-05, |
|
"loss": 1.5904, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.470588235294118e-05, |
|
"loss": 1.5691, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.4669117647058825e-05, |
|
"loss": 1.5736, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.463235294117648e-05, |
|
"loss": 1.5828, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.459558823529412e-05, |
|
"loss": 1.626, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.455882352941177e-05, |
|
"loss": 1.5758, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.452205882352941e-05, |
|
"loss": 1.5686, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.448529411764706e-05, |
|
"loss": 1.594, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.4448529411764704e-05, |
|
"loss": 1.605, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.4411764705882356e-05, |
|
"loss": 1.5628, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.4375e-05, |
|
"loss": 1.5738, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.4338235294117647e-05, |
|
"loss": 1.5813, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.430147058823529e-05, |
|
"loss": 1.558, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.4264705882352944e-05, |
|
"loss": 1.5974, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.422794117647059e-05, |
|
"loss": 1.582, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.4191176470588235e-05, |
|
"loss": 1.5758, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.415441176470588e-05, |
|
"loss": 1.5467, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.411764705882353e-05, |
|
"loss": 1.5621, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.408088235294118e-05, |
|
"loss": 1.5402, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.404411764705882e-05, |
|
"loss": 1.6221, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.400735294117647e-05, |
|
"loss": 1.5728, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.397058823529412e-05, |
|
"loss": 1.5801, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.3933823529411766e-05, |
|
"loss": 1.6026, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.389705882352941e-05, |
|
"loss": 1.623, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.3860294117647063e-05, |
|
"loss": 1.5657, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.382352941176471e-05, |
|
"loss": 1.5637, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.3786764705882354e-05, |
|
"loss": 1.6006, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.375e-05, |
|
"loss": 1.6032, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.371323529411765e-05, |
|
"loss": 1.5816, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.36764705882353e-05, |
|
"loss": 1.5769, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.363970588235294e-05, |
|
"loss": 1.6042, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.360294117647059e-05, |
|
"loss": 1.5442, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.356617647058824e-05, |
|
"loss": 1.5834, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.3529411764705885e-05, |
|
"loss": 1.5664, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.349264705882353e-05, |
|
"loss": 1.5611, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.345588235294118e-05, |
|
"loss": 1.5661, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.341911764705883e-05, |
|
"loss": 1.5466, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.3382352941176474e-05, |
|
"loss": 1.5953, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.334558823529412e-05, |
|
"loss": 1.5733, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.330882352941177e-05, |
|
"loss": 1.5854, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.3272058823529416e-05, |
|
"loss": 1.615, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.323529411764706e-05, |
|
"loss": 1.5459, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.319852941176471e-05, |
|
"loss": 1.5725, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.316176470588236e-05, |
|
"loss": 1.5714, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.3125000000000005e-05, |
|
"loss": 1.6121, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.308823529411765e-05, |
|
"loss": 1.5585, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.3051470588235295e-05, |
|
"loss": 1.5815, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.301470588235295e-05, |
|
"loss": 1.5459, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.297794117647059e-05, |
|
"loss": 1.5533, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.294117647058823e-05, |
|
"loss": 1.6328, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.2904411764705884e-05, |
|
"loss": 1.5658, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.286764705882353e-05, |
|
"loss": 1.5903, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.2830882352941174e-05, |
|
"loss": 1.5538, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.2794117647058827e-05, |
|
"loss": 1.5852, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.275735294117647e-05, |
|
"loss": 1.5633, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.272058823529412e-05, |
|
"loss": 1.5322, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.268382352941176e-05, |
|
"loss": 1.649, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.2647058823529415e-05, |
|
"loss": 1.5893, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.261029411764706e-05, |
|
"loss": 1.5398, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.2573529411764706e-05, |
|
"loss": 1.5964, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.253676470588235e-05, |
|
"loss": 1.5827, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.25e-05, |
|
"loss": 1.5695, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_gen_len": 695.0977366255144, |
|
"eval_loss": 1.4583299160003662, |
|
"eval_rouge1": 72.2243, |
|
"eval_rouge2": 41.372, |
|
"eval_rougeL": 37.8382, |
|
"eval_rougeLsum": 69.6295, |
|
"eval_runtime": 1521.0423, |
|
"eval_samples_per_second": 0.639, |
|
"eval_steps_per_second": 0.08, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.246323529411765e-05, |
|
"loss": 1.6104, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.2426470588235294e-05, |
|
"loss": 1.5129, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.238970588235294e-05, |
|
"loss": 1.5183, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.235294117647059e-05, |
|
"loss": 1.4872, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.231617647058824e-05, |
|
"loss": 1.54, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.227941176470588e-05, |
|
"loss": 1.4846, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.2242647058823534e-05, |
|
"loss": 1.5372, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.220588235294118e-05, |
|
"loss": 1.5029, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.2169117647058825e-05, |
|
"loss": 1.5842, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.213235294117647e-05, |
|
"loss": 1.5388, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.209558823529412e-05, |
|
"loss": 1.5548, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.205882352941177e-05, |
|
"loss": 1.5404, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.202205882352941e-05, |
|
"loss": 1.5176, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.198529411764706e-05, |
|
"loss": 1.5236, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.194852941176471e-05, |
|
"loss": 1.5629, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.1911764705882356e-05, |
|
"loss": 1.5498, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.1875e-05, |
|
"loss": 1.5055, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.1838235294117654e-05, |
|
"loss": 1.5415, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.18014705882353e-05, |
|
"loss": 1.5641, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.1764705882352944e-05, |
|
"loss": 1.5706, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.172794117647059e-05, |
|
"loss": 1.5415, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.169117647058824e-05, |
|
"loss": 1.5429, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.165441176470589e-05, |
|
"loss": 1.5656, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.161764705882353e-05, |
|
"loss": 1.5368, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.158088235294118e-05, |
|
"loss": 1.5456, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.154411764705883e-05, |
|
"loss": 1.5175, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.1507352941176475e-05, |
|
"loss": 1.4974, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.147058823529412e-05, |
|
"loss": 1.5959, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.1433823529411766e-05, |
|
"loss": 1.5371, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.139705882352941e-05, |
|
"loss": 1.5161, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.136029411764706e-05, |
|
"loss": 1.5434, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.13235294117647e-05, |
|
"loss": 1.5237, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.1286764705882354e-05, |
|
"loss": 1.5675, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.125e-05, |
|
"loss": 1.5648, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.1213235294117645e-05, |
|
"loss": 1.5291, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.11764705882353e-05, |
|
"loss": 1.5356, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.113970588235294e-05, |
|
"loss": 1.5438, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.110294117647059e-05, |
|
"loss": 1.5403, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.1066176470588233e-05, |
|
"loss": 1.5521, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.1029411764705886e-05, |
|
"loss": 1.5517, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.099264705882353e-05, |
|
"loss": 1.5388, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.0955882352941176e-05, |
|
"loss": 1.5106, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.091911764705882e-05, |
|
"loss": 1.4889, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.0882352941176474e-05, |
|
"loss": 1.5282, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.084558823529412e-05, |
|
"loss": 1.5273, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.0808823529411765e-05, |
|
"loss": 1.5599, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.077205882352941e-05, |
|
"loss": 1.5173, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.073529411764706e-05, |
|
"loss": 1.5329, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.069852941176471e-05, |
|
"loss": 1.5685, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.066176470588235e-05, |
|
"loss": 1.584, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.0625000000000005e-05, |
|
"loss": 1.5307, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.058823529411765e-05, |
|
"loss": 1.5374, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.0551470588235296e-05, |
|
"loss": 1.5278, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 4.051470588235294e-05, |
|
"loss": 1.5813, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.047794117647059e-05, |
|
"loss": 1.4977, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 4.044117647058824e-05, |
|
"loss": 1.5187, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.0404411764705884e-05, |
|
"loss": 1.5442, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.036764705882353e-05, |
|
"loss": 1.529, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.033088235294118e-05, |
|
"loss": 1.5556, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 4.029411764705883e-05, |
|
"loss": 1.5494, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.025735294117647e-05, |
|
"loss": 1.5284, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.0220588235294124e-05, |
|
"loss": 1.5657, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.018382352941177e-05, |
|
"loss": 1.5148, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 4.0147058823529415e-05, |
|
"loss": 1.5335, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.011029411764706e-05, |
|
"loss": 1.5635, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.007352941176471e-05, |
|
"loss": 1.5113, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.003676470588236e-05, |
|
"loss": 1.4989, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4e-05, |
|
"loss": 1.4951, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"eval_gen_len": 753.59670781893, |
|
"eval_loss": 1.4494504928588867, |
|
"eval_rouge1": 71.5808, |
|
"eval_rouge2": 40.5556, |
|
"eval_rougeL": 37.152, |
|
"eval_rougeLsum": 69.0536, |
|
"eval_runtime": 1658.1467, |
|
"eval_samples_per_second": 0.586, |
|
"eval_steps_per_second": 0.074, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.996323529411765e-05, |
|
"loss": 1.491, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.99264705882353e-05, |
|
"loss": 1.5411, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.9889705882352946e-05, |
|
"loss": 1.4722, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 3.985294117647059e-05, |
|
"loss": 1.543, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.981617647058824e-05, |
|
"loss": 1.5291, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.977941176470588e-05, |
|
"loss": 1.5114, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.974264705882353e-05, |
|
"loss": 1.455, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.970588235294117e-05, |
|
"loss": 1.5099, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.9669117647058825e-05, |
|
"loss": 1.435, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.963235294117647e-05, |
|
"loss": 1.5318, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.9595588235294116e-05, |
|
"loss": 1.459, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.955882352941177e-05, |
|
"loss": 1.5056, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.952205882352941e-05, |
|
"loss": 1.5043, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.948529411764706e-05, |
|
"loss": 1.4988, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.9448529411764704e-05, |
|
"loss": 1.4962, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.9411764705882356e-05, |
|
"loss": 1.4977, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.9375e-05, |
|
"loss": 1.5485, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.933823529411765e-05, |
|
"loss": 1.4396, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.930147058823529e-05, |
|
"loss": 1.5226, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.9264705882352945e-05, |
|
"loss": 1.4878, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.922794117647059e-05, |
|
"loss": 1.5179, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.9191176470588235e-05, |
|
"loss": 1.5323, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.915441176470588e-05, |
|
"loss": 1.4906, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.911764705882353e-05, |
|
"loss": 1.4704, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.908088235294118e-05, |
|
"loss": 1.4559, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.9044117647058823e-05, |
|
"loss": 1.5595, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.9007352941176476e-05, |
|
"loss": 1.5314, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.897058823529412e-05, |
|
"loss": 1.4872, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.8933823529411766e-05, |
|
"loss": 1.5049, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.889705882352941e-05, |
|
"loss": 1.4839, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.8860294117647064e-05, |
|
"loss": 1.5166, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.882352941176471e-05, |
|
"loss": 1.4871, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.8786764705882355e-05, |
|
"loss": 1.5005, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.875e-05, |
|
"loss": 1.4969, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.871323529411765e-05, |
|
"loss": 1.5049, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 3.86764705882353e-05, |
|
"loss": 1.5228, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.863970588235294e-05, |
|
"loss": 1.4748, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.8602941176470595e-05, |
|
"loss": 1.5033, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 3.856617647058824e-05, |
|
"loss": 1.4935, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.8529411764705886e-05, |
|
"loss": 1.4777, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.849264705882353e-05, |
|
"loss": 1.4853, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.845588235294118e-05, |
|
"loss": 1.5046, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 3.841911764705883e-05, |
|
"loss": 1.5109, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.8382352941176474e-05, |
|
"loss": 1.5174, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 3.834558823529412e-05, |
|
"loss": 1.465, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.830882352941177e-05, |
|
"loss": 1.5373, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.827205882352941e-05, |
|
"loss": 1.4863, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 3.8235294117647055e-05, |
|
"loss": 1.4979, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.819852941176471e-05, |
|
"loss": 1.5164, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 3.816176470588235e-05, |
|
"loss": 1.5135, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.8125e-05, |
|
"loss": 1.5337, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 3.8088235294117644e-05, |
|
"loss": 1.4749, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 3.8051470588235296e-05, |
|
"loss": 1.5061, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 3.801470588235294e-05, |
|
"loss": 1.4992, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.7977941176470587e-05, |
|
"loss": 1.4527, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 3.794117647058824e-05, |
|
"loss": 1.4815, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.7904411764705884e-05, |
|
"loss": 1.4803, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.786764705882353e-05, |
|
"loss": 1.5093, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 3.7830882352941175e-05, |
|
"loss": 1.5159, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.779411764705883e-05, |
|
"loss": 1.5017, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.775735294117647e-05, |
|
"loss": 1.5075, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 3.772058823529412e-05, |
|
"loss": 1.5284, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 3.768382352941176e-05, |
|
"loss": 1.4895, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 3.7647058823529415e-05, |
|
"loss": 1.4893, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 3.761029411764706e-05, |
|
"loss": 1.4695, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.7573529411764706e-05, |
|
"loss": 1.4988, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.753676470588235e-05, |
|
"loss": 1.4898, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 1.4561, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.746323529411765e-05, |
|
"loss": 1.496, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_gen_len": 700.216049382716, |
|
"eval_loss": 1.438555359840393, |
|
"eval_rouge1": 72.1271, |
|
"eval_rouge2": 41.1645, |
|
"eval_rougeL": 38.4096, |
|
"eval_rougeLsum": 69.6176, |
|
"eval_runtime": 1598.9004, |
|
"eval_samples_per_second": 0.608, |
|
"eval_steps_per_second": 0.076, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 3.7426470588235294e-05, |
|
"loss": 1.4707, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 3.7389705882352946e-05, |
|
"loss": 1.5299, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 3.735294117647059e-05, |
|
"loss": 1.46, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 3.731617647058824e-05, |
|
"loss": 1.4996, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 3.727941176470588e-05, |
|
"loss": 1.4146, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 3.7242647058823535e-05, |
|
"loss": 1.3875, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 3.720588235294118e-05, |
|
"loss": 1.4534, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 3.7169117647058825e-05, |
|
"loss": 1.4974, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 3.713235294117647e-05, |
|
"loss": 1.4793, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 3.709558823529412e-05, |
|
"loss": 1.4864, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.705882352941177e-05, |
|
"loss": 1.4895, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 3.7022058823529414e-05, |
|
"loss": 1.4974, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 3.6985294117647066e-05, |
|
"loss": 1.4878, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 3.694852941176471e-05, |
|
"loss": 1.5243, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 3.6911764705882356e-05, |
|
"loss": 1.4038, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 3.6875e-05, |
|
"loss": 1.4994, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 3.6838235294117654e-05, |
|
"loss": 1.4496, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 3.68014705882353e-05, |
|
"loss": 1.4704, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.6764705882352945e-05, |
|
"loss": 1.4691, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 3.672794117647059e-05, |
|
"loss": 1.4459, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 3.6691176470588235e-05, |
|
"loss": 1.4827, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.665441176470588e-05, |
|
"loss": 1.4771, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.6617647058823526e-05, |
|
"loss": 1.4671, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 3.658088235294118e-05, |
|
"loss": 1.4551, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 3.6544117647058824e-05, |
|
"loss": 1.4747, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 3.650735294117647e-05, |
|
"loss": 1.4824, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.6470588235294114e-05, |
|
"loss": 1.4738, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 3.6433823529411767e-05, |
|
"loss": 1.445, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 3.639705882352941e-05, |
|
"loss": 1.4796, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 3.636029411764706e-05, |
|
"loss": 1.4632, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 3.632352941176471e-05, |
|
"loss": 1.4111, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 3.6286764705882355e-05, |
|
"loss": 1.497, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 3.625e-05, |
|
"loss": 1.502, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 3.6213235294117646e-05, |
|
"loss": 1.4615, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 3.61764705882353e-05, |
|
"loss": 1.4528, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 3.613970588235294e-05, |
|
"loss": 1.4344, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 3.610294117647059e-05, |
|
"loss": 1.4592, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 3.6066176470588234e-05, |
|
"loss": 1.4368, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 3.6029411764705886e-05, |
|
"loss": 1.4946, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.599264705882353e-05, |
|
"loss": 1.5049, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.595588235294118e-05, |
|
"loss": 1.4952, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 3.591911764705882e-05, |
|
"loss": 1.4093, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 3.5882352941176474e-05, |
|
"loss": 1.4677, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 3.584558823529412e-05, |
|
"loss": 1.4667, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.5808823529411765e-05, |
|
"loss": 1.4669, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 3.577205882352942e-05, |
|
"loss": 1.495, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 3.573529411764706e-05, |
|
"loss": 1.4924, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 3.569852941176471e-05, |
|
"loss": 1.4797, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 3.566176470588235e-05, |
|
"loss": 1.4765, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 3.5625000000000005e-05, |
|
"loss": 1.4418, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 3.558823529411765e-05, |
|
"loss": 1.4843, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 3.5551470588235296e-05, |
|
"loss": 1.456, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 3.551470588235294e-05, |
|
"loss": 1.5136, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 3.5477941176470594e-05, |
|
"loss": 1.4719, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 3.544117647058824e-05, |
|
"loss": 1.4212, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 3.5404411764705884e-05, |
|
"loss": 1.4173, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 3.5367647058823536e-05, |
|
"loss": 1.4575, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 3.533088235294118e-05, |
|
"loss": 1.4826, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 3.529411764705883e-05, |
|
"loss": 1.421, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 3.525735294117647e-05, |
|
"loss": 1.4342, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 3.5220588235294125e-05, |
|
"loss": 1.4659, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 3.518382352941177e-05, |
|
"loss": 1.4755, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 3.514705882352941e-05, |
|
"loss": 1.474, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 3.511029411764706e-05, |
|
"loss": 1.4797, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 3.5073529411764706e-05, |
|
"loss": 1.4609, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 3.503676470588235e-05, |
|
"loss": 1.4638, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 3.5e-05, |
|
"loss": 1.4634, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.496323529411765e-05, |
|
"loss": 1.4258, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_gen_len": 743.7067901234568, |
|
"eval_loss": 1.4373914003372192, |
|
"eval_rouge1": 71.9975, |
|
"eval_rouge2": 41.0013, |
|
"eval_rougeL": 37.9947, |
|
"eval_rougeLsum": 69.449, |
|
"eval_runtime": 1641.7585, |
|
"eval_samples_per_second": 0.592, |
|
"eval_steps_per_second": 0.074, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 3.4926470588235294e-05, |
|
"loss": 1.4952, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 3.488970588235294e-05, |
|
"loss": 1.4216, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 3.4852941176470585e-05, |
|
"loss": 1.4445, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.481617647058824e-05, |
|
"loss": 1.4159, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 3.477941176470588e-05, |
|
"loss": 1.3788, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 3.474264705882353e-05, |
|
"loss": 1.4245, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 3.470588235294118e-05, |
|
"loss": 1.4599, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 3.4669117647058826e-05, |
|
"loss": 1.4125, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 3.463235294117647e-05, |
|
"loss": 1.4442, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.4595588235294116e-05, |
|
"loss": 1.41, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.455882352941177e-05, |
|
"loss": 1.4069, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.4522058823529414e-05, |
|
"loss": 1.4513, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.448529411764706e-05, |
|
"loss": 1.4286, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 3.4448529411764705e-05, |
|
"loss": 1.472, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 3.441176470588236e-05, |
|
"loss": 1.4066, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.4375e-05, |
|
"loss": 1.4513, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.433823529411765e-05, |
|
"loss": 1.4255, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.430147058823529e-05, |
|
"loss": 1.479, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 3.4264705882352945e-05, |
|
"loss": 1.4979, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 3.422794117647059e-05, |
|
"loss": 1.4266, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 3.4191176470588236e-05, |
|
"loss": 1.4678, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.415441176470589e-05, |
|
"loss": 1.3908, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 3.411764705882353e-05, |
|
"loss": 1.445, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 3.408088235294118e-05, |
|
"loss": 1.4338, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 3.4044117647058824e-05, |
|
"loss": 1.439, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 3.4007352941176476e-05, |
|
"loss": 1.4452, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.397058823529412e-05, |
|
"loss": 1.4424, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 3.393382352941177e-05, |
|
"loss": 1.4427, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 3.389705882352941e-05, |
|
"loss": 1.4691, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 3.3860294117647064e-05, |
|
"loss": 1.4458, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 3.382352941176471e-05, |
|
"loss": 1.3844, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.3786764705882355e-05, |
|
"loss": 1.4202, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 3.375000000000001e-05, |
|
"loss": 1.4793, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.371323529411765e-05, |
|
"loss": 1.4329, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 3.36764705882353e-05, |
|
"loss": 1.4631, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.363970588235294e-05, |
|
"loss": 1.4744, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 3.3602941176470595e-05, |
|
"loss": 1.4477, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.3566176470588234e-05, |
|
"loss": 1.4235, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 3.352941176470588e-05, |
|
"loss": 1.4051, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.349264705882353e-05, |
|
"loss": 1.4575, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 3.345588235294118e-05, |
|
"loss": 1.4539, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.341911764705882e-05, |
|
"loss": 1.4338, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 3.338235294117647e-05, |
|
"loss": 1.4074, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.334558823529412e-05, |
|
"loss": 1.4157, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.3308823529411765e-05, |
|
"loss": 1.4574, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.327205882352941e-05, |
|
"loss": 1.4211, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.3235294117647056e-05, |
|
"loss": 1.4321, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 3.319852941176471e-05, |
|
"loss": 1.439, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 3.3161764705882353e-05, |
|
"loss": 1.4324, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 3.3125e-05, |
|
"loss": 1.4745, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 3.308823529411765e-05, |
|
"loss": 1.4624, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.3051470588235296e-05, |
|
"loss": 1.4574, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 3.301470588235294e-05, |
|
"loss": 1.4399, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 3.297794117647059e-05, |
|
"loss": 1.4982, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 3.294117647058824e-05, |
|
"loss": 1.4166, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.2904411764705885e-05, |
|
"loss": 1.4914, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.286764705882353e-05, |
|
"loss": 1.4439, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.2830882352941175e-05, |
|
"loss": 1.4482, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 3.279411764705883e-05, |
|
"loss": 1.413, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.275735294117647e-05, |
|
"loss": 1.4354, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 3.272058823529412e-05, |
|
"loss": 1.4421, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.2683823529411764e-05, |
|
"loss": 1.4091, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.2647058823529416e-05, |
|
"loss": 1.4231, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.261029411764706e-05, |
|
"loss": 1.4688, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.2573529411764706e-05, |
|
"loss": 1.4393, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 3.253676470588236e-05, |
|
"loss": 1.4945, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.2500000000000004e-05, |
|
"loss": 1.4128, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.246323529411765e-05, |
|
"loss": 1.4301, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_gen_len": 724.5061728395061, |
|
"eval_loss": 1.4296016693115234, |
|
"eval_rouge1": 71.8896, |
|
"eval_rouge2": 40.8303, |
|
"eval_rougeL": 38.346, |
|
"eval_rougeLsum": 69.357, |
|
"eval_runtime": 1562.869, |
|
"eval_samples_per_second": 0.622, |
|
"eval_steps_per_second": 0.078, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.2426470588235295e-05, |
|
"loss": 1.4917, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 3.238970588235295e-05, |
|
"loss": 1.3842, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 3.235294117647059e-05, |
|
"loss": 1.3968, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.231617647058824e-05, |
|
"loss": 1.4171, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.227941176470588e-05, |
|
"loss": 1.4565, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.2242647058823535e-05, |
|
"loss": 1.4157, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 3.220588235294118e-05, |
|
"loss": 1.4288, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 3.2169117647058826e-05, |
|
"loss": 1.4051, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.213235294117647e-05, |
|
"loss": 1.4092, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 3.209558823529412e-05, |
|
"loss": 1.4243, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 3.205882352941177e-05, |
|
"loss": 1.4042, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.202205882352941e-05, |
|
"loss": 1.4389, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 3.198529411764706e-05, |
|
"loss": 1.4031, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 3.1948529411764705e-05, |
|
"loss": 1.3706, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.191176470588235e-05, |
|
"loss": 1.4254, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 3.1875e-05, |
|
"loss": 1.4442, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.183823529411765e-05, |
|
"loss": 1.384, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 3.180147058823529e-05, |
|
"loss": 1.4263, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.176470588235294e-05, |
|
"loss": 1.4057, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 3.172794117647059e-05, |
|
"loss": 1.4213, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.1691176470588236e-05, |
|
"loss": 1.4214, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.165441176470588e-05, |
|
"loss": 1.4503, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.161764705882353e-05, |
|
"loss": 1.3982, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.158088235294118e-05, |
|
"loss": 1.431, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.1544117647058824e-05, |
|
"loss": 1.3804, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 3.150735294117647e-05, |
|
"loss": 1.4094, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.147058823529412e-05, |
|
"loss": 1.3905, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.143382352941177e-05, |
|
"loss": 1.4601, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.139705882352941e-05, |
|
"loss": 1.3997, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 3.136029411764706e-05, |
|
"loss": 1.4487, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.132352941176471e-05, |
|
"loss": 1.4067, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 3.1286764705882355e-05, |
|
"loss": 1.4207, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 3.125e-05, |
|
"loss": 1.4311, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 3.1213235294117646e-05, |
|
"loss": 1.4227, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.11764705882353e-05, |
|
"loss": 1.376, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 3.1139705882352944e-05, |
|
"loss": 1.4088, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 3.110294117647059e-05, |
|
"loss": 1.424, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 3.1066176470588234e-05, |
|
"loss": 1.4597, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 3.1029411764705886e-05, |
|
"loss": 1.438, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 3.099264705882353e-05, |
|
"loss": 1.4579, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 3.095588235294118e-05, |
|
"loss": 1.4094, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 3.091911764705883e-05, |
|
"loss": 1.4407, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 3.0882352941176475e-05, |
|
"loss": 1.4369, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 3.084558823529412e-05, |
|
"loss": 1.4274, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 3.0808823529411765e-05, |
|
"loss": 1.3982, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 3.077205882352942e-05, |
|
"loss": 1.4094, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 3.073529411764706e-05, |
|
"loss": 1.4243, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 3.069852941176471e-05, |
|
"loss": 1.4223, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 3.0661764705882354e-05, |
|
"loss": 1.3813, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 3.0625000000000006e-05, |
|
"loss": 1.4707, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 3.058823529411765e-05, |
|
"loss": 1.4154, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 3.0551470588235297e-05, |
|
"loss": 1.417, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 3.0514705882352945e-05, |
|
"loss": 1.4043, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 3.0477941176470594e-05, |
|
"loss": 1.3876, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 3.0441176470588233e-05, |
|
"loss": 1.3565, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 3.040441176470588e-05, |
|
"loss": 1.405, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 3.036764705882353e-05, |
|
"loss": 1.4422, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 3.0330882352941176e-05, |
|
"loss": 1.4294, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 3.0294117647058824e-05, |
|
"loss": 1.4392, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 3.025735294117647e-05, |
|
"loss": 1.3836, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 3.022058823529412e-05, |
|
"loss": 1.3834, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 3.0183823529411764e-05, |
|
"loss": 1.4444, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 3.0147058823529413e-05, |
|
"loss": 1.374, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 3.0110294117647058e-05, |
|
"loss": 1.4511, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 3.0073529411764707e-05, |
|
"loss": 1.4613, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 3.0036764705882352e-05, |
|
"loss": 1.362, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 3e-05, |
|
"loss": 1.4456, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.9963235294117646e-05, |
|
"loss": 1.4015, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_gen_len": 731.2685185185185, |
|
"eval_loss": 1.4312541484832764, |
|
"eval_rouge1": 72.0031, |
|
"eval_rouge2": 40.9229, |
|
"eval_rougeL": 38.2581, |
|
"eval_rougeLsum": 69.4154, |
|
"eval_runtime": 1579.5755, |
|
"eval_samples_per_second": 0.615, |
|
"eval_steps_per_second": 0.077, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 2.9926470588235295e-05, |
|
"loss": 1.3968, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 2.988970588235294e-05, |
|
"loss": 1.3402, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 2.985294117647059e-05, |
|
"loss": 1.3487, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 2.9816176470588238e-05, |
|
"loss": 1.3428, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.9779411764705883e-05, |
|
"loss": 1.3522, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 2.9742647058823532e-05, |
|
"loss": 1.3956, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 2.9705882352941177e-05, |
|
"loss": 1.3808, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 2.9669117647058826e-05, |
|
"loss": 1.3745, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 2.963235294117647e-05, |
|
"loss": 1.4278, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 2.959558823529412e-05, |
|
"loss": 1.3555, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 2.9558823529411766e-05, |
|
"loss": 1.4131, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 2.9522058823529414e-05, |
|
"loss": 1.3813, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 2.948529411764706e-05, |
|
"loss": 1.4082, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 2.944852941176471e-05, |
|
"loss": 1.3813, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 2.9411764705882354e-05, |
|
"loss": 1.3513, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 2.9375000000000003e-05, |
|
"loss": 1.4245, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 2.933823529411765e-05, |
|
"loss": 1.3952, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 2.9301470588235297e-05, |
|
"loss": 1.4263, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 2.9264705882352945e-05, |
|
"loss": 1.3879, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 2.922794117647059e-05, |
|
"loss": 1.4151, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 2.919117647058824e-05, |
|
"loss": 1.413, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 2.9154411764705885e-05, |
|
"loss": 1.3681, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 2.9117647058823534e-05, |
|
"loss": 1.4315, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 2.908088235294118e-05, |
|
"loss": 1.4176, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 2.9044117647058828e-05, |
|
"loss": 1.3543, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 2.9007352941176473e-05, |
|
"loss": 1.3848, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 2.8970588235294122e-05, |
|
"loss": 1.4014, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 2.8933823529411767e-05, |
|
"loss": 1.3431, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 2.889705882352941e-05, |
|
"loss": 1.4267, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 2.8860294117647058e-05, |
|
"loss": 1.3691, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 2.8823529411764703e-05, |
|
"loss": 1.431, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 2.8786764705882352e-05, |
|
"loss": 1.407, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 2.8749999999999997e-05, |
|
"loss": 1.3792, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 2.8713235294117646e-05, |
|
"loss": 1.4238, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 2.8676470588235295e-05, |
|
"loss": 1.3587, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 2.863970588235294e-05, |
|
"loss": 1.4583, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 2.860294117647059e-05, |
|
"loss": 1.3723, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 2.8566176470588234e-05, |
|
"loss": 1.3941, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 2.8529411764705883e-05, |
|
"loss": 1.3643, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 2.849264705882353e-05, |
|
"loss": 1.395, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 2.8455882352941177e-05, |
|
"loss": 1.4268, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 2.8419117647058823e-05, |
|
"loss": 1.3778, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 2.838235294117647e-05, |
|
"loss": 1.3982, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 2.8345588235294117e-05, |
|
"loss": 1.4177, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 2.8308823529411766e-05, |
|
"loss": 1.4038, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 2.827205882352941e-05, |
|
"loss": 1.3917, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 2.823529411764706e-05, |
|
"loss": 1.3826, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 2.819852941176471e-05, |
|
"loss": 1.3897, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 2.8161764705882354e-05, |
|
"loss": 1.4077, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 2.8125000000000003e-05, |
|
"loss": 1.3959, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 2.8088235294117648e-05, |
|
"loss": 1.4188, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 2.8051470588235297e-05, |
|
"loss": 1.42, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 2.8014705882352942e-05, |
|
"loss": 1.4458, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 2.797794117647059e-05, |
|
"loss": 1.4319, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 2.7941176470588236e-05, |
|
"loss": 1.3902, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 2.7904411764705885e-05, |
|
"loss": 1.4131, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 2.786764705882353e-05, |
|
"loss": 1.3798, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 2.783088235294118e-05, |
|
"loss": 1.4616, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 2.7794117647058824e-05, |
|
"loss": 1.3843, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 2.7757352941176473e-05, |
|
"loss": 1.4045, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 2.7720588235294122e-05, |
|
"loss": 1.3807, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 2.7683823529411767e-05, |
|
"loss": 1.4065, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 2.7647058823529416e-05, |
|
"loss": 1.384, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 2.761029411764706e-05, |
|
"loss": 1.3956, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 2.757352941176471e-05, |
|
"loss": 1.4194, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 2.7536764705882356e-05, |
|
"loss": 1.3842, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 1.3934, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 2.746323529411765e-05, |
|
"loss": 1.391, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"eval_gen_len": 751.0288065843621, |
|
"eval_loss": 1.4266291856765747, |
|
"eval_rouge1": 71.9948, |
|
"eval_rouge2": 41.0084, |
|
"eval_rougeL": 38.0938, |
|
"eval_rougeLsum": 69.4488, |
|
"eval_runtime": 1628.8819, |
|
"eval_samples_per_second": 0.597, |
|
"eval_steps_per_second": 0.075, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 2.74264705882353e-05, |
|
"loss": 1.3653, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 2.7389705882352944e-05, |
|
"loss": 1.3824, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 2.7352941176470593e-05, |
|
"loss": 1.3889, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 2.7316176470588235e-05, |
|
"loss": 1.3367, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 2.727941176470588e-05, |
|
"loss": 1.3948, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 2.724264705882353e-05, |
|
"loss": 1.3637, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 2.7205882352941174e-05, |
|
"loss": 1.3765, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 2.7169117647058823e-05, |
|
"loss": 1.4013, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 2.7132352941176468e-05, |
|
"loss": 1.3769, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 2.7095588235294117e-05, |
|
"loss": 1.3869, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 2.7058823529411766e-05, |
|
"loss": 1.411, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 2.702205882352941e-05, |
|
"loss": 1.3683, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 2.698529411764706e-05, |
|
"loss": 1.3908, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 2.6948529411764705e-05, |
|
"loss": 1.3476, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 2.6911764705882354e-05, |
|
"loss": 1.3895, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 2.6875e-05, |
|
"loss": 1.3803, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 2.6838235294117648e-05, |
|
"loss": 1.3206, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 2.6801470588235293e-05, |
|
"loss": 1.4003, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 2.6764705882352942e-05, |
|
"loss": 1.3715, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 2.6727941176470588e-05, |
|
"loss": 1.3392, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 2.6691176470588236e-05, |
|
"loss": 1.3589, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 2.6654411764705882e-05, |
|
"loss": 1.3877, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 2.661764705882353e-05, |
|
"loss": 1.3362, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 2.658088235294118e-05, |
|
"loss": 1.4065, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 2.6544117647058825e-05, |
|
"loss": 1.3609, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 2.6507352941176473e-05, |
|
"loss": 1.3775, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 2.647058823529412e-05, |
|
"loss": 1.3911, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 2.6433823529411767e-05, |
|
"loss": 1.3185, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 2.6397058823529413e-05, |
|
"loss": 1.3545, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 2.636029411764706e-05, |
|
"loss": 1.3931, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 2.6323529411764707e-05, |
|
"loss": 1.3904, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 2.6286764705882356e-05, |
|
"loss": 1.3639, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.625e-05, |
|
"loss": 1.3596, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 2.621323529411765e-05, |
|
"loss": 1.3503, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 2.6176470588235295e-05, |
|
"loss": 1.3422, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 2.6139705882352944e-05, |
|
"loss": 1.3685, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 2.6102941176470593e-05, |
|
"loss": 1.3588, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.6066176470588238e-05, |
|
"loss": 1.3447, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 2.6029411764705887e-05, |
|
"loss": 1.3905, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 2.5992647058823532e-05, |
|
"loss": 1.355, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.595588235294118e-05, |
|
"loss": 1.4111, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 2.5919117647058826e-05, |
|
"loss": 1.3899, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 2.5882352941176475e-05, |
|
"loss": 1.3392, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 2.584558823529412e-05, |
|
"loss": 1.4007, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 2.580882352941177e-05, |
|
"loss": 1.3787, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 2.577205882352941e-05, |
|
"loss": 1.3836, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 2.5735294117647057e-05, |
|
"loss": 1.3484, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 2.5698529411764705e-05, |
|
"loss": 1.3679, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 2.566176470588235e-05, |
|
"loss": 1.3896, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 2.5625e-05, |
|
"loss": 1.3651, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 2.5588235294117645e-05, |
|
"loss": 1.3793, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 2.5551470588235294e-05, |
|
"loss": 1.4282, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 2.551470588235294e-05, |
|
"loss": 1.3675, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 2.5477941176470588e-05, |
|
"loss": 1.4051, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 2.5441176470588236e-05, |
|
"loss": 1.3748, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 2.5404411764705882e-05, |
|
"loss": 1.3787, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.536764705882353e-05, |
|
"loss": 1.3327, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 2.5330882352941176e-05, |
|
"loss": 1.4099, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 2.5294117647058825e-05, |
|
"loss": 1.3376, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 2.525735294117647e-05, |
|
"loss": 1.3869, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 2.522058823529412e-05, |
|
"loss": 1.375, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 2.5183823529411764e-05, |
|
"loss": 1.3683, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 2.5147058823529413e-05, |
|
"loss": 1.4441, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 2.511029411764706e-05, |
|
"loss": 1.3933, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 2.5073529411764707e-05, |
|
"loss": 1.395, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 2.5036764705882352e-05, |
|
"loss": 1.3576, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 2.5e-05, |
|
"loss": 1.3596, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 2.496323529411765e-05, |
|
"loss": 1.3993, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 2.4926470588235295e-05, |
|
"loss": 1.3642, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_gen_len": 756.9567901234568, |
|
"eval_loss": 1.4286822080612183, |
|
"eval_rouge1": 71.9115, |
|
"eval_rouge2": 40.8683, |
|
"eval_rougeL": 38.1602, |
|
"eval_rougeLsum": 69.3514, |
|
"eval_runtime": 1650.4445, |
|
"eval_samples_per_second": 0.589, |
|
"eval_steps_per_second": 0.074, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 2.4889705882352944e-05, |
|
"loss": 1.3945, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 2.485294117647059e-05, |
|
"loss": 1.363, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 2.4816176470588238e-05, |
|
"loss": 1.3679, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 2.4779411764705884e-05, |
|
"loss": 1.3388, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 2.4742647058823532e-05, |
|
"loss": 1.3329, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 2.4705882352941178e-05, |
|
"loss": 1.3307, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 2.4669117647058826e-05, |
|
"loss": 1.3527, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 2.4632352941176472e-05, |
|
"loss": 1.3653, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 2.4595588235294117e-05, |
|
"loss": 1.382, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 2.4558823529411766e-05, |
|
"loss": 1.3889, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 2.452205882352941e-05, |
|
"loss": 1.3916, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 2.448529411764706e-05, |
|
"loss": 1.3198, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 2.4448529411764705e-05, |
|
"loss": 1.3155, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 2.4411764705882354e-05, |
|
"loss": 1.3847, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 2.4375e-05, |
|
"loss": 1.349, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 2.433823529411765e-05, |
|
"loss": 1.352, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 2.4301470588235294e-05, |
|
"loss": 1.3717, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 2.4264705882352942e-05, |
|
"loss": 1.3683, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 2.4227941176470588e-05, |
|
"loss": 1.3213, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 2.4191176470588237e-05, |
|
"loss": 1.3255, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 2.4154411764705885e-05, |
|
"loss": 1.3526, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 2.411764705882353e-05, |
|
"loss": 1.3597, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 2.408088235294118e-05, |
|
"loss": 1.3518, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 2.4044117647058825e-05, |
|
"loss": 1.3943, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 2.4007352941176474e-05, |
|
"loss": 1.373, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 2.397058823529412e-05, |
|
"loss": 1.3419, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 2.3933823529411768e-05, |
|
"loss": 1.3706, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 2.3897058823529413e-05, |
|
"loss": 1.3642, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 2.3860294117647062e-05, |
|
"loss": 1.3543, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 2.3823529411764707e-05, |
|
"loss": 1.3694, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 2.3786764705882353e-05, |
|
"loss": 1.309, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 2.375e-05, |
|
"loss": 1.3165, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 2.3713235294117647e-05, |
|
"loss": 1.3724, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 2.3676470588235295e-05, |
|
"loss": 1.3499, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 2.363970588235294e-05, |
|
"loss": 1.3617, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 2.360294117647059e-05, |
|
"loss": 1.3622, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 2.3566176470588235e-05, |
|
"loss": 1.3893, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 2.3529411764705884e-05, |
|
"loss": 1.3935, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 2.349264705882353e-05, |
|
"loss": 1.3686, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 2.3455882352941178e-05, |
|
"loss": 1.3825, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 2.3419117647058823e-05, |
|
"loss": 1.3421, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 2.3382352941176472e-05, |
|
"loss": 1.322, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 2.334558823529412e-05, |
|
"loss": 1.4137, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 2.3308823529411766e-05, |
|
"loss": 1.3694, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 2.3272058823529415e-05, |
|
"loss": 1.3782, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 2.323529411764706e-05, |
|
"loss": 1.3616, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 2.319852941176471e-05, |
|
"loss": 1.3411, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 2.3161764705882354e-05, |
|
"loss": 1.3463, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"learning_rate": 2.3125000000000003e-05, |
|
"loss": 1.3614, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 2.308823529411765e-05, |
|
"loss": 1.3385, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 2.3051470588235297e-05, |
|
"loss": 1.3826, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 2.3014705882352943e-05, |
|
"loss": 1.3593, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 2.2977941176470588e-05, |
|
"loss": 1.3619, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 2.2941176470588237e-05, |
|
"loss": 1.34, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 2.2904411764705882e-05, |
|
"loss": 1.3325, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 2.286764705882353e-05, |
|
"loss": 1.3662, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 2.2830882352941176e-05, |
|
"loss": 1.3124, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 2.2794117647058825e-05, |
|
"loss": 1.3394, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"learning_rate": 2.275735294117647e-05, |
|
"loss": 1.3887, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 2.272058823529412e-05, |
|
"loss": 1.3633, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 2.2683823529411764e-05, |
|
"loss": 1.3102, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 2.2647058823529413e-05, |
|
"loss": 1.3374, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 2.261029411764706e-05, |
|
"loss": 1.3936, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 2.2573529411764707e-05, |
|
"loss": 1.3102, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 2.2536764705882356e-05, |
|
"loss": 1.3755, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 2.25e-05, |
|
"loss": 1.3736, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 2.246323529411765e-05, |
|
"loss": 1.3589, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 2.2426470588235296e-05, |
|
"loss": 1.3516, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"eval_gen_len": 719.2798353909465, |
|
"eval_loss": 1.4288647174835205, |
|
"eval_rouge1": 72.3822, |
|
"eval_rouge2": 41.5074, |
|
"eval_rougeL": 38.8088, |
|
"eval_rougeLsum": 69.8232, |
|
"eval_runtime": 1557.6648, |
|
"eval_samples_per_second": 0.624, |
|
"eval_steps_per_second": 0.078, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 2.2389705882352944e-05, |
|
"loss": 1.3657, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"learning_rate": 2.235294117647059e-05, |
|
"loss": 1.3212, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 2.231617647058824e-05, |
|
"loss": 1.3978, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 2.2279411764705884e-05, |
|
"loss": 1.3518, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 2.224264705882353e-05, |
|
"loss": 1.346, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 2.2205882352941178e-05, |
|
"loss": 1.3151, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 2.2169117647058823e-05, |
|
"loss": 1.319, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 2.2132352941176472e-05, |
|
"loss": 1.3212, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 2.2095588235294117e-05, |
|
"loss": 1.3204, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 2.2058823529411766e-05, |
|
"loss": 1.327, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 2.202205882352941e-05, |
|
"loss": 1.3346, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 2.198529411764706e-05, |
|
"loss": 1.317, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 2.1948529411764706e-05, |
|
"loss": 1.3602, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 2.1911764705882354e-05, |
|
"loss": 1.3381, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 2.1875e-05, |
|
"loss": 1.3612, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 2.183823529411765e-05, |
|
"loss": 1.354, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 11.24, |
|
"learning_rate": 2.1801470588235294e-05, |
|
"loss": 1.3243, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 2.1764705882352943e-05, |
|
"loss": 1.3307, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 2.172794117647059e-05, |
|
"loss": 1.3548, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 11.29, |
|
"learning_rate": 2.1691176470588237e-05, |
|
"loss": 1.3255, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 2.1654411764705886e-05, |
|
"loss": 1.315, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"learning_rate": 2.161764705882353e-05, |
|
"loss": 1.3557, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 2.158088235294118e-05, |
|
"loss": 1.3271, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 2.1544117647058825e-05, |
|
"loss": 1.3445, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 2.1507352941176474e-05, |
|
"loss": 1.3348, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 11.38, |
|
"learning_rate": 2.1470588235294116e-05, |
|
"loss": 1.3343, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 2.1433823529411765e-05, |
|
"loss": 1.349, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 11.4, |
|
"learning_rate": 2.1397058823529413e-05, |
|
"loss": 1.3468, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 11.42, |
|
"learning_rate": 2.136029411764706e-05, |
|
"loss": 1.3431, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 2.1323529411764707e-05, |
|
"loss": 1.3461, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 11.45, |
|
"learning_rate": 2.1286764705882353e-05, |
|
"loss": 1.3521, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 2.125e-05, |
|
"loss": 1.3195, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 2.1213235294117647e-05, |
|
"loss": 1.3161, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 11.49, |
|
"learning_rate": 2.1176470588235296e-05, |
|
"loss": 1.3336, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 2.113970588235294e-05, |
|
"loss": 1.3297, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 2.110294117647059e-05, |
|
"loss": 1.3476, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 11.54, |
|
"learning_rate": 2.1066176470588235e-05, |
|
"loss": 1.3284, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"learning_rate": 2.1029411764705884e-05, |
|
"loss": 1.3674, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 11.57, |
|
"learning_rate": 2.099264705882353e-05, |
|
"loss": 1.3074, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 2.0955882352941178e-05, |
|
"loss": 1.3501, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"learning_rate": 2.0919117647058827e-05, |
|
"loss": 1.3241, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 2.0882352941176472e-05, |
|
"loss": 1.3565, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"learning_rate": 2.084558823529412e-05, |
|
"loss": 1.3133, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 11.64, |
|
"learning_rate": 2.0808823529411766e-05, |
|
"loss": 1.3516, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 2.0772058823529415e-05, |
|
"loss": 1.3538, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 2.073529411764706e-05, |
|
"loss": 1.3515, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 2.0698529411764706e-05, |
|
"loss": 1.3462, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"learning_rate": 2.066176470588235e-05, |
|
"loss": 1.3422, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 2.0625e-05, |
|
"loss": 1.3305, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 2.058823529411765e-05, |
|
"loss": 1.3876, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"learning_rate": 2.0551470588235294e-05, |
|
"loss": 1.3291, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 2.0514705882352943e-05, |
|
"loss": 1.3781, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 2.0477941176470588e-05, |
|
"loss": 1.3234, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"learning_rate": 2.0441176470588237e-05, |
|
"loss": 1.3116, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"learning_rate": 2.0404411764705882e-05, |
|
"loss": 1.3286, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 2.036764705882353e-05, |
|
"loss": 1.3293, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 2.0330882352941176e-05, |
|
"loss": 1.3454, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"learning_rate": 2.0294117647058825e-05, |
|
"loss": 1.3543, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 2.025735294117647e-05, |
|
"loss": 1.3454, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 11.87, |
|
"learning_rate": 2.022058823529412e-05, |
|
"loss": 1.3094, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"learning_rate": 2.0183823529411765e-05, |
|
"loss": 1.3677, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 2.0147058823529413e-05, |
|
"loss": 1.3973, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"learning_rate": 2.0110294117647062e-05, |
|
"loss": 1.3076, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 2.0073529411764708e-05, |
|
"loss": 1.3677, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"learning_rate": 2.0036764705882356e-05, |
|
"loss": 1.3758, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 2e-05, |
|
"loss": 1.3601, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 1.996323529411765e-05, |
|
"loss": 1.3611, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 11.99, |
|
"learning_rate": 1.9926470588235296e-05, |
|
"loss": 1.3243, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_gen_len": 749.9475308641976, |
|
"eval_loss": 1.4301081895828247, |
|
"eval_rouge1": 71.83, |
|
"eval_rouge2": 40.764, |
|
"eval_rougeL": 38.1124, |
|
"eval_rougeLsum": 69.2767, |
|
"eval_runtime": 1620.9293, |
|
"eval_samples_per_second": 0.6, |
|
"eval_steps_per_second": 0.075, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 1.988970588235294e-05, |
|
"loss": 1.357, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 12.02, |
|
"learning_rate": 1.9852941176470586e-05, |
|
"loss": 1.3135, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 1.9816176470588235e-05, |
|
"loss": 1.3555, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 12.05, |
|
"learning_rate": 1.9779411764705884e-05, |
|
"loss": 1.2697, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 1.974264705882353e-05, |
|
"loss": 1.3532, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 1.9705882352941178e-05, |
|
"loss": 1.3258, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 1.9669117647058824e-05, |
|
"loss": 1.325, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 1.9632352941176472e-05, |
|
"loss": 1.2975, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 12.12, |
|
"learning_rate": 1.9595588235294118e-05, |
|
"loss": 1.2828, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 1.9558823529411766e-05, |
|
"loss": 1.312, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 1.9522058823529412e-05, |
|
"loss": 1.3121, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 1.948529411764706e-05, |
|
"loss": 1.3396, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 12.18, |
|
"learning_rate": 1.9448529411764706e-05, |
|
"loss": 1.2769, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 12.2, |
|
"learning_rate": 1.9411764705882355e-05, |
|
"loss": 1.3263, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 1.9375e-05, |
|
"loss": 1.326, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 12.23, |
|
"learning_rate": 1.933823529411765e-05, |
|
"loss": 1.3461, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 1.9301470588235298e-05, |
|
"loss": 1.3487, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"learning_rate": 1.9264705882352943e-05, |
|
"loss": 1.3022, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 12.27, |
|
"learning_rate": 1.922794117647059e-05, |
|
"loss": 1.3561, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 1.9191176470588237e-05, |
|
"loss": 1.3453, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"learning_rate": 1.9154411764705886e-05, |
|
"loss": 1.3329, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 1.9117647058823528e-05, |
|
"loss": 1.2951, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 1.9080882352941176e-05, |
|
"loss": 1.3186, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 1.9044117647058822e-05, |
|
"loss": 1.3403, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 1.900735294117647e-05, |
|
"loss": 1.2859, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"learning_rate": 1.897058823529412e-05, |
|
"loss": 1.3411, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 1.8933823529411765e-05, |
|
"loss": 1.3516, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 1.8897058823529413e-05, |
|
"loss": 1.3255, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 12.42, |
|
"learning_rate": 1.886029411764706e-05, |
|
"loss": 1.3153, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"learning_rate": 1.8823529411764708e-05, |
|
"loss": 1.2845, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 1.8786764705882353e-05, |
|
"loss": 1.3377, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 1.373, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 12.47, |
|
"learning_rate": 1.8713235294117647e-05, |
|
"loss": 1.2894, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 1.8676470588235296e-05, |
|
"loss": 1.3133, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 1.863970588235294e-05, |
|
"loss": 1.3286, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"learning_rate": 1.860294117647059e-05, |
|
"loss": 1.3867, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 1.8566176470588235e-05, |
|
"loss": 1.2944, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 1.8529411764705884e-05, |
|
"loss": 1.3457, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 1.8492647058823533e-05, |
|
"loss": 1.3665, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 1.8455882352941178e-05, |
|
"loss": 1.3161, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 1.8419117647058827e-05, |
|
"loss": 1.3251, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 1.8382352941176472e-05, |
|
"loss": 1.36, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 12.62, |
|
"learning_rate": 1.8345588235294118e-05, |
|
"loss": 1.3419, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 1.8308823529411763e-05, |
|
"loss": 1.3806, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 1.8272058823529412e-05, |
|
"loss": 1.3691, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 1.8235294117647057e-05, |
|
"loss": 1.3219, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"learning_rate": 1.8198529411764706e-05, |
|
"loss": 1.3191, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 1.8161764705882355e-05, |
|
"loss": 1.2994, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 1.8125e-05, |
|
"loss": 1.3314, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 1.808823529411765e-05, |
|
"loss": 1.2967, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 1.8051470588235294e-05, |
|
"loss": 1.3496, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 1.8014705882352943e-05, |
|
"loss": 1.3272, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 1.797794117647059e-05, |
|
"loss": 1.2768, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 1.7941176470588237e-05, |
|
"loss": 1.3192, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"learning_rate": 1.7904411764705882e-05, |
|
"loss": 1.344, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 1.786764705882353e-05, |
|
"loss": 1.3883, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 1.7830882352941177e-05, |
|
"loss": 1.3605, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 1.7794117647058825e-05, |
|
"loss": 1.3313, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 1.775735294117647e-05, |
|
"loss": 1.2923, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 12.87, |
|
"learning_rate": 1.772058823529412e-05, |
|
"loss": 1.2899, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 1.7683823529411768e-05, |
|
"loss": 1.3147, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 12.9, |
|
"learning_rate": 1.7647058823529414e-05, |
|
"loss": 1.3371, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 1.7610294117647062e-05, |
|
"loss": 1.3207, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 1.7573529411764704e-05, |
|
"loss": 1.3458, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 1.7536764705882353e-05, |
|
"loss": 1.3439, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 1.75e-05, |
|
"loss": 1.3251, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 1.7463235294117647e-05, |
|
"loss": 1.3033, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 1.7426470588235293e-05, |
|
"loss": 1.3582, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"eval_gen_len": 736.6697530864197, |
|
"eval_loss": 1.4282923936843872, |
|
"eval_rouge1": 71.9495, |
|
"eval_rouge2": 40.9556, |
|
"eval_rougeL": 38.4201, |
|
"eval_rougeLsum": 69.4394, |
|
"eval_runtime": 1583.479, |
|
"eval_samples_per_second": 0.614, |
|
"eval_steps_per_second": 0.077, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 1.738970588235294e-05, |
|
"loss": 1.2927, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 1.735294117647059e-05, |
|
"loss": 1.3201, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 1.7316176470588235e-05, |
|
"loss": 1.3495, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 1.7279411764705884e-05, |
|
"loss": 1.3304, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"learning_rate": 1.724264705882353e-05, |
|
"loss": 1.313, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 13.08, |
|
"learning_rate": 1.720588235294118e-05, |
|
"loss": 1.3057, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 1.7169117647058824e-05, |
|
"loss": 1.2878, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 1.7132352941176472e-05, |
|
"loss": 1.3116, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 1.7095588235294118e-05, |
|
"loss": 1.3044, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 13.13, |
|
"learning_rate": 1.7058823529411767e-05, |
|
"loss": 1.3104, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"learning_rate": 1.7022058823529412e-05, |
|
"loss": 1.3194, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 1.698529411764706e-05, |
|
"loss": 1.2856, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 1.6948529411764706e-05, |
|
"loss": 1.3316, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 1.6911764705882355e-05, |
|
"loss": 1.2928, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 1.6875000000000004e-05, |
|
"loss": 1.3015, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"learning_rate": 1.683823529411765e-05, |
|
"loss": 1.2799, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 1.6801470588235298e-05, |
|
"loss": 1.2931, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 13.25, |
|
"learning_rate": 1.676470588235294e-05, |
|
"loss": 1.3208, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 1.672794117647059e-05, |
|
"loss": 1.2931, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"learning_rate": 1.6691176470588234e-05, |
|
"loss": 1.3227, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 1.6654411764705883e-05, |
|
"loss": 1.2942, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 13.31, |
|
"learning_rate": 1.6617647058823528e-05, |
|
"loss": 1.2788, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"learning_rate": 1.6580882352941177e-05, |
|
"loss": 1.3047, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 1.6544117647058825e-05, |
|
"loss": 1.3175, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 13.35, |
|
"learning_rate": 1.650735294117647e-05, |
|
"loss": 1.2905, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 1.647058823529412e-05, |
|
"loss": 1.3043, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 13.38, |
|
"learning_rate": 1.6433823529411765e-05, |
|
"loss": 1.2759, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 1.6397058823529414e-05, |
|
"loss": 1.3425, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 13.41, |
|
"learning_rate": 1.636029411764706e-05, |
|
"loss": 1.3027, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 13.43, |
|
"learning_rate": 1.6323529411764708e-05, |
|
"loss": 1.3053, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 1.6286764705882353e-05, |
|
"loss": 1.3119, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 13.46, |
|
"learning_rate": 1.6250000000000002e-05, |
|
"loss": 1.3279, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 1.6213235294117647e-05, |
|
"loss": 1.3614, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 1.6176470588235296e-05, |
|
"loss": 1.3282, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 1.613970588235294e-05, |
|
"loss": 1.3251, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 1.610294117647059e-05, |
|
"loss": 1.326, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 13.53, |
|
"learning_rate": 1.6066176470588236e-05, |
|
"loss": 1.3205, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 1.6029411764705884e-05, |
|
"loss": 1.3226, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"learning_rate": 1.599264705882353e-05, |
|
"loss": 1.3032, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 1.5955882352941175e-05, |
|
"loss": 1.3281, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"learning_rate": 1.5919117647058824e-05, |
|
"loss": 1.3202, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 1.588235294117647e-05, |
|
"loss": 1.3221, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 13.62, |
|
"learning_rate": 1.5845588235294118e-05, |
|
"loss": 1.3169, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 1.5808823529411763e-05, |
|
"loss": 1.3271, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"learning_rate": 1.5772058823529412e-05, |
|
"loss": 1.316, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"learning_rate": 1.573529411764706e-05, |
|
"loss": 1.3288, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"learning_rate": 1.5698529411764706e-05, |
|
"loss": 1.3265, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 1.5661764705882355e-05, |
|
"loss": 1.288, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 13.71, |
|
"learning_rate": 1.5625e-05, |
|
"loss": 1.3173, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 1.558823529411765e-05, |
|
"loss": 1.2725, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 1.5551470588235294e-05, |
|
"loss": 1.3313, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 1.5514705882352943e-05, |
|
"loss": 1.3461, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 13.76, |
|
"learning_rate": 1.547794117647059e-05, |
|
"loss": 1.3167, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"learning_rate": 1.5441176470588237e-05, |
|
"loss": 1.31, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 1.5404411764705883e-05, |
|
"loss": 1.3019, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"learning_rate": 1.536764705882353e-05, |
|
"loss": 1.2956, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 1.5330882352941177e-05, |
|
"loss": 1.3674, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 1.5294117647058826e-05, |
|
"loss": 1.3059, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"learning_rate": 1.5257352941176473e-05, |
|
"loss": 1.3666, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 1.5220588235294116e-05, |
|
"loss": 1.3184, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 13.88, |
|
"learning_rate": 1.5183823529411765e-05, |
|
"loss": 1.3217, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"learning_rate": 1.5147058823529412e-05, |
|
"loss": 1.3442, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 1.511029411764706e-05, |
|
"loss": 1.3049, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 1.5073529411764706e-05, |
|
"loss": 1.3225, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 1.5036764705882353e-05, |
|
"loss": 1.2776, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.3137, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 1.4963235294117647e-05, |
|
"loss": 1.3005, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"learning_rate": 1.4926470588235295e-05, |
|
"loss": 1.3646, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 1.4889705882352942e-05, |
|
"loss": 1.3149, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_gen_len": 753.3230452674898, |
|
"eval_loss": 1.4298332929611206, |
|
"eval_rouge1": 71.9599, |
|
"eval_rouge2": 40.8875, |
|
"eval_rougeL": 38.2722, |
|
"eval_rougeLsum": 69.4209, |
|
"eval_runtime": 1612.8705, |
|
"eval_samples_per_second": 0.603, |
|
"eval_steps_per_second": 0.076, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 14.01, |
|
"learning_rate": 1.4852941176470589e-05, |
|
"loss": 1.2861, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 1.4816176470588236e-05, |
|
"loss": 1.3035, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"learning_rate": 1.4779411764705883e-05, |
|
"loss": 1.3005, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 1.474264705882353e-05, |
|
"loss": 1.3289, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"learning_rate": 1.4705882352941177e-05, |
|
"loss": 1.2952, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 14.09, |
|
"learning_rate": 1.4669117647058826e-05, |
|
"loss": 1.3101, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"learning_rate": 1.4632352941176473e-05, |
|
"loss": 1.29, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"learning_rate": 1.459558823529412e-05, |
|
"loss": 1.3145, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 14.13, |
|
"learning_rate": 1.4558823529411767e-05, |
|
"loss": 1.294, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"learning_rate": 1.4522058823529414e-05, |
|
"loss": 1.3079, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 14.16, |
|
"learning_rate": 1.4485294117647061e-05, |
|
"loss": 1.2614, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 1.4448529411764705e-05, |
|
"loss": 1.3538, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"learning_rate": 1.4411764705882352e-05, |
|
"loss": 1.3405, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"learning_rate": 1.4374999999999999e-05, |
|
"loss": 1.309, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 14.22, |
|
"learning_rate": 1.4338235294117647e-05, |
|
"loss": 1.3272, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"learning_rate": 1.4301470588235295e-05, |
|
"loss": 1.3025, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 14.25, |
|
"learning_rate": 1.4264705882352942e-05, |
|
"loss": 1.2779, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 1.4227941176470589e-05, |
|
"loss": 1.3401, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 14.28, |
|
"learning_rate": 1.4191176470588236e-05, |
|
"loss": 1.2977, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"learning_rate": 1.4154411764705883e-05, |
|
"loss": 1.3437, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"learning_rate": 1.411764705882353e-05, |
|
"loss": 1.282, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"learning_rate": 1.4080882352941177e-05, |
|
"loss": 1.2567, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 14.34, |
|
"learning_rate": 1.4044117647058824e-05, |
|
"loss": 1.3013, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"learning_rate": 1.4007352941176471e-05, |
|
"loss": 1.2499, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"learning_rate": 1.3970588235294118e-05, |
|
"loss": 1.2942, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"learning_rate": 1.3933823529411765e-05, |
|
"loss": 1.2772, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 14.39, |
|
"learning_rate": 1.3897058823529412e-05, |
|
"loss": 1.3157, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"learning_rate": 1.3860294117647061e-05, |
|
"loss": 1.3004, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"learning_rate": 1.3823529411764708e-05, |
|
"loss": 1.2854, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"learning_rate": 1.3786764705882355e-05, |
|
"loss": 1.3026, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 1.3750000000000002e-05, |
|
"loss": 1.3356, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 1.371323529411765e-05, |
|
"loss": 1.3398, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"learning_rate": 1.3676470588235296e-05, |
|
"loss": 1.2938, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 1.363970588235294e-05, |
|
"loss": 1.3315, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 14.51, |
|
"learning_rate": 1.3602941176470587e-05, |
|
"loss": 1.311, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 1.3566176470588234e-05, |
|
"loss": 1.3192, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 1.3529411764705883e-05, |
|
"loss": 1.3083, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"learning_rate": 1.349264705882353e-05, |
|
"loss": 1.2973, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 14.57, |
|
"learning_rate": 1.3455882352941177e-05, |
|
"loss": 1.3182, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"learning_rate": 1.3419117647058824e-05, |
|
"loss": 1.2913, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 14.6, |
|
"learning_rate": 1.3382352941176471e-05, |
|
"loss": 1.2591, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 1.3345588235294118e-05, |
|
"loss": 1.3413, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 1.3308823529411765e-05, |
|
"loss": 1.2683, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 1.3272058823529412e-05, |
|
"loss": 1.2815, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 1.323529411764706e-05, |
|
"loss": 1.2921, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"learning_rate": 1.3198529411764706e-05, |
|
"loss": 1.2715, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 1.3161764705882353e-05, |
|
"loss": 1.3028, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 14.7, |
|
"learning_rate": 1.3125e-05, |
|
"loss": 1.2744, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 14.72, |
|
"learning_rate": 1.3088235294117648e-05, |
|
"loss": 1.2491, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 14.73, |
|
"learning_rate": 1.3051470588235296e-05, |
|
"loss": 1.305, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 14.75, |
|
"learning_rate": 1.3014705882352943e-05, |
|
"loss": 1.3338, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 14.76, |
|
"learning_rate": 1.297794117647059e-05, |
|
"loss": 1.2969, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"learning_rate": 1.2941176470588238e-05, |
|
"loss": 1.3163, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 14.79, |
|
"learning_rate": 1.2904411764705885e-05, |
|
"loss": 1.3305, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 1.2867647058823528e-05, |
|
"loss": 1.3379, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 14.82, |
|
"learning_rate": 1.2830882352941175e-05, |
|
"loss": 1.3051, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 1.2794117647058822e-05, |
|
"loss": 1.3007, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 14.85, |
|
"learning_rate": 1.275735294117647e-05, |
|
"loss": 1.2793, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"learning_rate": 1.2720588235294118e-05, |
|
"loss": 1.2757, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 14.88, |
|
"learning_rate": 1.2683823529411765e-05, |
|
"loss": 1.3085, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"learning_rate": 1.2647058823529412e-05, |
|
"loss": 1.2862, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 14.91, |
|
"learning_rate": 1.261029411764706e-05, |
|
"loss": 1.3401, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"learning_rate": 1.2573529411764706e-05, |
|
"loss": 1.3122, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"learning_rate": 1.2536764705882354e-05, |
|
"loss": 1.3264, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 1.25e-05, |
|
"loss": 1.3266, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 14.97, |
|
"learning_rate": 1.2463235294117648e-05, |
|
"loss": 1.2872, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 14.98, |
|
"learning_rate": 1.2426470588235295e-05, |
|
"loss": 1.3349, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 1.2389705882352942e-05, |
|
"loss": 1.288, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_gen_len": 744.8858024691358, |
|
"eval_loss": 1.4325816631317139, |
|
"eval_rouge1": 72.1615, |
|
"eval_rouge2": 41.1549, |
|
"eval_rougeL": 38.611, |
|
"eval_rougeLsum": 69.5977, |
|
"eval_runtime": 1630.2451, |
|
"eval_samples_per_second": 0.596, |
|
"eval_steps_per_second": 0.075, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 15.01, |
|
"learning_rate": 1.2352941176470589e-05, |
|
"loss": 1.3295, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 1.2316176470588236e-05, |
|
"loss": 1.2984, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"learning_rate": 1.2279411764705883e-05, |
|
"loss": 1.263, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"learning_rate": 1.224264705882353e-05, |
|
"loss": 1.3002, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"learning_rate": 1.2205882352941177e-05, |
|
"loss": 1.2865, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"learning_rate": 1.2169117647058824e-05, |
|
"loss": 1.3056, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"learning_rate": 1.2132352941176471e-05, |
|
"loss": 1.3438, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"learning_rate": 1.2095588235294118e-05, |
|
"loss": 1.3021, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 15.13, |
|
"learning_rate": 1.2058823529411765e-05, |
|
"loss": 1.3076, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"learning_rate": 1.2022058823529412e-05, |
|
"loss": 1.3062, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 15.16, |
|
"learning_rate": 1.198529411764706e-05, |
|
"loss": 1.2882, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"learning_rate": 1.1948529411764707e-05, |
|
"loss": 1.2838, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"learning_rate": 1.1911764705882354e-05, |
|
"loss": 1.2722, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 1.1875e-05, |
|
"loss": 1.2971, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 15.22, |
|
"learning_rate": 1.1838235294117648e-05, |
|
"loss": 1.2587, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"learning_rate": 1.1801470588235295e-05, |
|
"loss": 1.2874, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 15.25, |
|
"learning_rate": 1.1764705882352942e-05, |
|
"loss": 1.3102, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"learning_rate": 1.1727941176470589e-05, |
|
"loss": 1.2912, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 15.27, |
|
"learning_rate": 1.1691176470588236e-05, |
|
"loss": 1.2675, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"learning_rate": 1.1654411764705883e-05, |
|
"loss": 1.2793, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 15.3, |
|
"learning_rate": 1.161764705882353e-05, |
|
"loss": 1.2745, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"learning_rate": 1.1580882352941177e-05, |
|
"loss": 1.3025, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"learning_rate": 1.1544117647058824e-05, |
|
"loss": 1.3012, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 15.35, |
|
"learning_rate": 1.1507352941176471e-05, |
|
"loss": 1.2981, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 15.36, |
|
"learning_rate": 1.1470588235294118e-05, |
|
"loss": 1.3368, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 15.38, |
|
"learning_rate": 1.1433823529411765e-05, |
|
"loss": 1.3038, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 15.39, |
|
"learning_rate": 1.1397058823529412e-05, |
|
"loss": 1.3169, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"learning_rate": 1.136029411764706e-05, |
|
"loss": 1.2868, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 1.1323529411764707e-05, |
|
"loss": 1.3159, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"learning_rate": 1.1286764705882354e-05, |
|
"loss": 1.2993, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"learning_rate": 1.125e-05, |
|
"loss": 1.2819, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 15.46, |
|
"learning_rate": 1.1213235294117648e-05, |
|
"loss": 1.2649, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 15.48, |
|
"learning_rate": 1.1176470588235295e-05, |
|
"loss": 1.2955, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 15.49, |
|
"learning_rate": 1.1139705882352942e-05, |
|
"loss": 1.2832, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 15.51, |
|
"learning_rate": 1.1102941176470589e-05, |
|
"loss": 1.2808, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 15.52, |
|
"learning_rate": 1.1066176470588236e-05, |
|
"loss": 1.2859, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 15.54, |
|
"learning_rate": 1.1029411764705883e-05, |
|
"loss": 1.3093, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 15.55, |
|
"learning_rate": 1.099264705882353e-05, |
|
"loss": 1.3145, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 1.0955882352941177e-05, |
|
"loss": 1.3115, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 15.58, |
|
"learning_rate": 1.0919117647058824e-05, |
|
"loss": 1.2994, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 1.0882352941176471e-05, |
|
"loss": 1.3344, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 15.61, |
|
"learning_rate": 1.0845588235294118e-05, |
|
"loss": 1.2821, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 15.63, |
|
"learning_rate": 1.0808823529411765e-05, |
|
"loss": 1.3163, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 15.64, |
|
"learning_rate": 1.0772058823529413e-05, |
|
"loss": 1.3224, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"learning_rate": 1.0735294117647058e-05, |
|
"loss": 1.3034, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"learning_rate": 1.0698529411764707e-05, |
|
"loss": 1.2512, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 1.0661764705882354e-05, |
|
"loss": 1.2868, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"learning_rate": 1.0625e-05, |
|
"loss": 1.2871, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 1.0588235294117648e-05, |
|
"loss": 1.2935, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"learning_rate": 1.0551470588235295e-05, |
|
"loss": 1.3185, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 1.0514705882352942e-05, |
|
"loss": 1.3362, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 1.0477941176470589e-05, |
|
"loss": 1.305, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"learning_rate": 1.0441176470588236e-05, |
|
"loss": 1.284, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 1.0404411764705883e-05, |
|
"loss": 1.2949, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"learning_rate": 1.036764705882353e-05, |
|
"loss": 1.3266, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 1.0330882352941176e-05, |
|
"loss": 1.3154, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 1.0294117647058824e-05, |
|
"loss": 1.2759, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"learning_rate": 1.0257352941176471e-05, |
|
"loss": 1.297, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"learning_rate": 1.0220588235294118e-05, |
|
"loss": 1.276, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 15.88, |
|
"learning_rate": 1.0183823529411766e-05, |
|
"loss": 1.287, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 1.0147058823529413e-05, |
|
"loss": 1.2727, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 15.9, |
|
"learning_rate": 1.011029411764706e-05, |
|
"loss": 1.2757, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"learning_rate": 1.0073529411764707e-05, |
|
"loss": 1.2815, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 15.93, |
|
"learning_rate": 1.0036764705882354e-05, |
|
"loss": 1.2777, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2677, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"learning_rate": 9.963235294117648e-06, |
|
"loss": 1.2776, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"learning_rate": 9.926470588235293e-06, |
|
"loss": 1.2892, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 15.99, |
|
"learning_rate": 9.889705882352942e-06, |
|
"loss": 1.2937, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_gen_len": 755.533950617284, |
|
"eval_loss": 1.431522250175476, |
|
"eval_rouge1": 71.9783, |
|
"eval_rouge2": 40.9073, |
|
"eval_rougeL": 38.4263, |
|
"eval_rougeLsum": 69.4109, |
|
"eval_runtime": 1634.7834, |
|
"eval_samples_per_second": 0.595, |
|
"eval_steps_per_second": 0.075, |
|
"step": 2183 |
|
}, |
|
{ |
|
"epoch": 16.01, |
|
"learning_rate": 9.852941176470589e-06, |
|
"loss": 1.2779, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"learning_rate": 9.816176470588236e-06, |
|
"loss": 1.2125, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"learning_rate": 9.779411764705883e-06, |
|
"loss": 1.2782, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"learning_rate": 9.74264705882353e-06, |
|
"loss": 1.2828, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"learning_rate": 9.705882352941177e-06, |
|
"loss": 1.2797, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"learning_rate": 9.669117647058824e-06, |
|
"loss": 1.2381, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"learning_rate": 9.632352941176471e-06, |
|
"loss": 1.2742, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"learning_rate": 9.595588235294119e-06, |
|
"loss": 1.2858, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"learning_rate": 9.558823529411764e-06, |
|
"loss": 1.2895, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 16.14, |
|
"learning_rate": 9.522058823529411e-06, |
|
"loss": 1.3103, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"learning_rate": 9.48529411764706e-06, |
|
"loss": 1.2326, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 9.448529411764707e-06, |
|
"loss": 1.2753, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 16.18, |
|
"learning_rate": 9.411764705882354e-06, |
|
"loss": 1.2954, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 1.2823, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 9.338235294117648e-06, |
|
"loss": 1.3075, |
|
"step": 2212 |
|
}, |
|
{ |
|
"epoch": 16.23, |
|
"learning_rate": 9.301470588235295e-06, |
|
"loss": 1.3029, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 9.264705882352942e-06, |
|
"loss": 1.2876, |
|
"step": 2216 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"learning_rate": 9.227941176470589e-06, |
|
"loss": 1.2668, |
|
"step": 2218 |
|
}, |
|
{ |
|
"epoch": 16.27, |
|
"learning_rate": 9.191176470588236e-06, |
|
"loss": 1.3387, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"learning_rate": 9.154411764705882e-06, |
|
"loss": 1.3048, |
|
"step": 2222 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 9.117647058823529e-06, |
|
"loss": 1.305, |
|
"step": 2224 |
|
}, |
|
{ |
|
"epoch": 16.32, |
|
"learning_rate": 9.080882352941177e-06, |
|
"loss": 1.2966, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 9.044117647058824e-06, |
|
"loss": 1.2869, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"learning_rate": 9.007352941176471e-06, |
|
"loss": 1.2822, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 16.36, |
|
"learning_rate": 8.970588235294119e-06, |
|
"loss": 1.2886, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"learning_rate": 8.933823529411766e-06, |
|
"loss": 1.3117, |
|
"step": 2234 |
|
}, |
|
{ |
|
"epoch": 16.39, |
|
"learning_rate": 8.897058823529413e-06, |
|
"loss": 1.2609, |
|
"step": 2236 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"learning_rate": 8.86029411764706e-06, |
|
"loss": 1.2904, |
|
"step": 2238 |
|
}, |
|
{ |
|
"epoch": 16.42, |
|
"learning_rate": 8.823529411764707e-06, |
|
"loss": 1.3034, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"learning_rate": 8.786764705882352e-06, |
|
"loss": 1.3141, |
|
"step": 2242 |
|
}, |
|
{ |
|
"epoch": 16.45, |
|
"learning_rate": 8.75e-06, |
|
"loss": 1.3099, |
|
"step": 2244 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"learning_rate": 8.713235294117646e-06, |
|
"loss": 1.2894, |
|
"step": 2246 |
|
}, |
|
{ |
|
"epoch": 16.48, |
|
"learning_rate": 8.676470588235295e-06, |
|
"loss": 1.3491, |
|
"step": 2248 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"learning_rate": 8.639705882352942e-06, |
|
"loss": 1.288, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 16.51, |
|
"learning_rate": 8.60294117647059e-06, |
|
"loss": 1.2939, |
|
"step": 2252 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"learning_rate": 8.566176470588236e-06, |
|
"loss": 1.238, |
|
"step": 2254 |
|
}, |
|
{ |
|
"epoch": 16.54, |
|
"learning_rate": 8.529411764705883e-06, |
|
"loss": 1.2663, |
|
"step": 2256 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"learning_rate": 8.49264705882353e-06, |
|
"loss": 1.2732, |
|
"step": 2258 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"learning_rate": 8.455882352941177e-06, |
|
"loss": 1.294, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 8.419117647058824e-06, |
|
"loss": 1.3043, |
|
"step": 2262 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"learning_rate": 8.38235294117647e-06, |
|
"loss": 1.2902, |
|
"step": 2264 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 8.345588235294117e-06, |
|
"loss": 1.3337, |
|
"step": 2266 |
|
}, |
|
{ |
|
"epoch": 16.62, |
|
"learning_rate": 8.308823529411764e-06, |
|
"loss": 1.2245, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"learning_rate": 8.272058823529413e-06, |
|
"loss": 1.3017, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 16.65, |
|
"learning_rate": 8.23529411764706e-06, |
|
"loss": 1.3169, |
|
"step": 2272 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 8.198529411764707e-06, |
|
"loss": 1.2641, |
|
"step": 2274 |
|
}, |
|
{ |
|
"epoch": 16.68, |
|
"learning_rate": 8.161764705882354e-06, |
|
"loss": 1.2978, |
|
"step": 2276 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"learning_rate": 8.125000000000001e-06, |
|
"loss": 1.276, |
|
"step": 2278 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"learning_rate": 8.088235294117648e-06, |
|
"loss": 1.3104, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"learning_rate": 8.051470588235295e-06, |
|
"loss": 1.2719, |
|
"step": 2282 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"learning_rate": 8.014705882352942e-06, |
|
"loss": 1.3139, |
|
"step": 2284 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"learning_rate": 7.977941176470588e-06, |
|
"loss": 1.2831, |
|
"step": 2286 |
|
}, |
|
{ |
|
"epoch": 16.77, |
|
"learning_rate": 7.941176470588235e-06, |
|
"loss": 1.2787, |
|
"step": 2288 |
|
}, |
|
{ |
|
"epoch": 16.78, |
|
"learning_rate": 7.904411764705882e-06, |
|
"loss": 1.2411, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"learning_rate": 7.86764705882353e-06, |
|
"loss": 1.2503, |
|
"step": 2292 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 7.830882352941177e-06, |
|
"loss": 1.2947, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 16.83, |
|
"learning_rate": 7.794117647058825e-06, |
|
"loss": 1.2967, |
|
"step": 2296 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 7.757352941176472e-06, |
|
"loss": 1.2755, |
|
"step": 2298 |
|
}, |
|
{ |
|
"epoch": 16.86, |
|
"learning_rate": 7.720588235294119e-06, |
|
"loss": 1.2765, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"learning_rate": 7.683823529411766e-06, |
|
"loss": 1.2852, |
|
"step": 2302 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 7.647058823529413e-06, |
|
"loss": 1.2997, |
|
"step": 2304 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"learning_rate": 7.610294117647058e-06, |
|
"loss": 1.2993, |
|
"step": 2306 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"learning_rate": 7.573529411764706e-06, |
|
"loss": 1.2849, |
|
"step": 2308 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"learning_rate": 7.536764705882353e-06, |
|
"loss": 1.2966, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.3007, |
|
"step": 2312 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"learning_rate": 7.463235294117647e-06, |
|
"loss": 1.3096, |
|
"step": 2314 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"learning_rate": 7.426470588235294e-06, |
|
"loss": 1.2897, |
|
"step": 2316 |
|
}, |
|
{ |
|
"epoch": 16.99, |
|
"learning_rate": 7.389705882352941e-06, |
|
"loss": 1.258, |
|
"step": 2318 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_gen_len": 734.6399176954733, |
|
"eval_loss": 1.4328014850616455, |
|
"eval_rouge1": 72.0298, |
|
"eval_rouge2": 40.931, |
|
"eval_rougeL": 38.4845, |
|
"eval_rougeLsum": 69.4823, |
|
"eval_runtime": 1579.2093, |
|
"eval_samples_per_second": 0.615, |
|
"eval_steps_per_second": 0.077, |
|
"step": 2319 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 7.3529411764705884e-06, |
|
"loss": 1.2402, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"learning_rate": 7.316176470588236e-06, |
|
"loss": 1.3125, |
|
"step": 2322 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 7.279411764705883e-06, |
|
"loss": 1.2644, |
|
"step": 2324 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"learning_rate": 7.2426470588235305e-06, |
|
"loss": 1.2538, |
|
"step": 2326 |
|
}, |
|
{ |
|
"epoch": 17.06, |
|
"learning_rate": 7.205882352941176e-06, |
|
"loss": 1.3019, |
|
"step": 2328 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 7.169117647058824e-06, |
|
"loss": 1.283, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 7.132352941176471e-06, |
|
"loss": 1.2648, |
|
"step": 2332 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"learning_rate": 7.095588235294118e-06, |
|
"loss": 1.2731, |
|
"step": 2334 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"learning_rate": 7.058823529411765e-06, |
|
"loss": 1.2447, |
|
"step": 2336 |
|
}, |
|
{ |
|
"epoch": 17.14, |
|
"learning_rate": 7.022058823529412e-06, |
|
"loss": 1.3294, |
|
"step": 2338 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"learning_rate": 6.985294117647059e-06, |
|
"loss": 1.326, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"learning_rate": 6.948529411764706e-06, |
|
"loss": 1.285, |
|
"step": 2342 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"learning_rate": 6.911764705882354e-06, |
|
"loss": 1.3157, |
|
"step": 2344 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 6.875000000000001e-06, |
|
"loss": 1.2731, |
|
"step": 2346 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"learning_rate": 6.838235294117648e-06, |
|
"loss": 1.2805, |
|
"step": 2348 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"learning_rate": 6.8014705882352935e-06, |
|
"loss": 1.3002, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 6.7647058823529414e-06, |
|
"loss": 1.2717, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 17.25, |
|
"learning_rate": 6.7279411764705885e-06, |
|
"loss": 1.2338, |
|
"step": 2354 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"learning_rate": 6.6911764705882356e-06, |
|
"loss": 1.2363, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 17.28, |
|
"learning_rate": 6.654411764705883e-06, |
|
"loss": 1.2774, |
|
"step": 2358 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 6.61764705882353e-06, |
|
"loss": 1.2517, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"learning_rate": 6.580882352941177e-06, |
|
"loss": 1.2792, |
|
"step": 2362 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 6.544117647058824e-06, |
|
"loss": 1.2314, |
|
"step": 2364 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 6.507352941176472e-06, |
|
"loss": 1.2584, |
|
"step": 2366 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"learning_rate": 6.470588235294119e-06, |
|
"loss": 1.3062, |
|
"step": 2368 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 6.433823529411764e-06, |
|
"loss": 1.2593, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"learning_rate": 6.397058823529411e-06, |
|
"loss": 1.2988, |
|
"step": 2372 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 6.360294117647059e-06, |
|
"loss": 1.2704, |
|
"step": 2374 |
|
}, |
|
{ |
|
"epoch": 17.41, |
|
"learning_rate": 6.323529411764706e-06, |
|
"loss": 1.2924, |
|
"step": 2376 |
|
}, |
|
{ |
|
"epoch": 17.43, |
|
"learning_rate": 6.286764705882353e-06, |
|
"loss": 1.2964, |
|
"step": 2378 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.2825, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 17.46, |
|
"learning_rate": 6.213235294117647e-06, |
|
"loss": 1.2956, |
|
"step": 2382 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 6.1764705882352944e-06, |
|
"loss": 1.2423, |
|
"step": 2384 |
|
}, |
|
{ |
|
"epoch": 17.49, |
|
"learning_rate": 6.1397058823529415e-06, |
|
"loss": 1.2824, |
|
"step": 2386 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 6.1029411764705885e-06, |
|
"loss": 1.2939, |
|
"step": 2388 |
|
}, |
|
{ |
|
"epoch": 17.52, |
|
"learning_rate": 6.066176470588236e-06, |
|
"loss": 1.2747, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 6.029411764705883e-06, |
|
"loss": 1.2501, |
|
"step": 2392 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"learning_rate": 5.99264705882353e-06, |
|
"loss": 1.2715, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"learning_rate": 5.955882352941177e-06, |
|
"loss": 1.2974, |
|
"step": 2396 |
|
}, |
|
{ |
|
"epoch": 17.58, |
|
"learning_rate": 5.919117647058824e-06, |
|
"loss": 1.2486, |
|
"step": 2398 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 1.2593, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 17.61, |
|
"learning_rate": 5.845588235294118e-06, |
|
"loss": 1.2523, |
|
"step": 2402 |
|
}, |
|
{ |
|
"epoch": 17.62, |
|
"learning_rate": 5.808823529411765e-06, |
|
"loss": 1.2977, |
|
"step": 2404 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"learning_rate": 5.772058823529412e-06, |
|
"loss": 1.2788, |
|
"step": 2406 |
|
}, |
|
{ |
|
"epoch": 17.65, |
|
"learning_rate": 5.735294117647059e-06, |
|
"loss": 1.2569, |
|
"step": 2408 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 5.698529411764706e-06, |
|
"loss": 1.2709, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"learning_rate": 5.661764705882353e-06, |
|
"loss": 1.2801, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"learning_rate": 5.625e-06, |
|
"loss": 1.2747, |
|
"step": 2414 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 5.588235294117647e-06, |
|
"loss": 1.2934, |
|
"step": 2416 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"learning_rate": 5.5514705882352945e-06, |
|
"loss": 1.2478, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 5.5147058823529415e-06, |
|
"loss": 1.3025, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 17.75, |
|
"learning_rate": 5.477941176470589e-06, |
|
"loss": 1.303, |
|
"step": 2422 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"learning_rate": 5.441176470588236e-06, |
|
"loss": 1.2874, |
|
"step": 2424 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"learning_rate": 5.404411764705883e-06, |
|
"loss": 1.2947, |
|
"step": 2426 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"learning_rate": 5.367647058823529e-06, |
|
"loss": 1.3189, |
|
"step": 2428 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"learning_rate": 5.330882352941177e-06, |
|
"loss": 1.2738, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"learning_rate": 5.294117647058824e-06, |
|
"loss": 1.3261, |
|
"step": 2432 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"learning_rate": 5.257352941176471e-06, |
|
"loss": 1.313, |
|
"step": 2434 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"learning_rate": 5.220588235294118e-06, |
|
"loss": 1.2975, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"learning_rate": 5.183823529411765e-06, |
|
"loss": 1.2727, |
|
"step": 2438 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"learning_rate": 5.147058823529412e-06, |
|
"loss": 1.2827, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 17.9, |
|
"learning_rate": 5.110294117647059e-06, |
|
"loss": 1.2173, |
|
"step": 2442 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 5.073529411764706e-06, |
|
"loss": 1.2882, |
|
"step": 2444 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"learning_rate": 5.036764705882353e-06, |
|
"loss": 1.2659, |
|
"step": 2446 |
|
}, |
|
{ |
|
"epoch": 17.94, |
|
"learning_rate": 5e-06, |
|
"loss": 1.2641, |
|
"step": 2448 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"learning_rate": 4.963235294117647e-06, |
|
"loss": 1.2967, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 4.9264705882352945e-06, |
|
"loss": 1.302, |
|
"step": 2452 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"learning_rate": 4.889705882352942e-06, |
|
"loss": 1.2617, |
|
"step": 2454 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"eval_gen_len": 744.7067901234568, |
|
"eval_loss": 1.4336270093917847, |
|
"eval_rouge1": 71.9488, |
|
"eval_rouge2": 40.8816, |
|
"eval_rougeL": 38.4521, |
|
"eval_rougeLsum": 69.4151, |
|
"eval_runtime": 1622.2621, |
|
"eval_samples_per_second": 0.599, |
|
"eval_steps_per_second": 0.075, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 4.852941176470589e-06, |
|
"loss": 1.3316, |
|
"step": 2456 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 4.816176470588236e-06, |
|
"loss": 1.271, |
|
"step": 2458 |
|
}, |
|
{ |
|
"epoch": 18.03, |
|
"learning_rate": 4.779411764705882e-06, |
|
"loss": 1.2762, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 18.04, |
|
"learning_rate": 4.74264705882353e-06, |
|
"loss": 1.2747, |
|
"step": 2462 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"learning_rate": 4.705882352941177e-06, |
|
"loss": 1.2935, |
|
"step": 2464 |
|
}, |
|
{ |
|
"epoch": 18.07, |
|
"learning_rate": 4.669117647058824e-06, |
|
"loss": 1.2907, |
|
"step": 2466 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 4.632352941176471e-06, |
|
"loss": 1.2581, |
|
"step": 2468 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 4.595588235294118e-06, |
|
"loss": 1.3023, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 18.12, |
|
"learning_rate": 4.558823529411764e-06, |
|
"loss": 1.2795, |
|
"step": 2472 |
|
}, |
|
{ |
|
"epoch": 18.13, |
|
"learning_rate": 4.522058823529412e-06, |
|
"loss": 1.2715, |
|
"step": 2474 |
|
}, |
|
{ |
|
"epoch": 18.15, |
|
"learning_rate": 4.485294117647059e-06, |
|
"loss": 1.2621, |
|
"step": 2476 |
|
}, |
|
{ |
|
"epoch": 18.16, |
|
"learning_rate": 4.448529411764706e-06, |
|
"loss": 1.2882, |
|
"step": 2478 |
|
}, |
|
{ |
|
"epoch": 18.18, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 1.2765, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"learning_rate": 4.375e-06, |
|
"loss": 1.2678, |
|
"step": 2482 |
|
}, |
|
{ |
|
"epoch": 18.21, |
|
"learning_rate": 4.3382352941176475e-06, |
|
"loss": 1.2843, |
|
"step": 2484 |
|
}, |
|
{ |
|
"epoch": 18.22, |
|
"learning_rate": 4.301470588235295e-06, |
|
"loss": 1.3001, |
|
"step": 2486 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 4.264705882352942e-06, |
|
"loss": 1.2784, |
|
"step": 2488 |
|
}, |
|
{ |
|
"epoch": 18.25, |
|
"learning_rate": 4.227941176470589e-06, |
|
"loss": 1.3116, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"learning_rate": 4.191176470588235e-06, |
|
"loss": 1.2721, |
|
"step": 2492 |
|
}, |
|
{ |
|
"epoch": 18.28, |
|
"learning_rate": 4.154411764705882e-06, |
|
"loss": 1.2575, |
|
"step": 2494 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 4.11764705882353e-06, |
|
"loss": 1.2532, |
|
"step": 2496 |
|
}, |
|
{ |
|
"epoch": 18.31, |
|
"learning_rate": 4.080882352941177e-06, |
|
"loss": 1.3147, |
|
"step": 2498 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"learning_rate": 4.044117647058824e-06, |
|
"loss": 1.2965, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 18.34, |
|
"learning_rate": 4.007352941176471e-06, |
|
"loss": 1.2836, |
|
"step": 2502 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"learning_rate": 3.970588235294117e-06, |
|
"loss": 1.276, |
|
"step": 2504 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 3.933823529411765e-06, |
|
"loss": 1.2781, |
|
"step": 2506 |
|
}, |
|
{ |
|
"epoch": 18.38, |
|
"learning_rate": 3.897058823529412e-06, |
|
"loss": 1.2817, |
|
"step": 2508 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 3.860294117647059e-06, |
|
"loss": 1.256, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"learning_rate": 3.823529411764706e-06, |
|
"loss": 1.2359, |
|
"step": 2512 |
|
}, |
|
{ |
|
"epoch": 18.43, |
|
"learning_rate": 3.786764705882353e-06, |
|
"loss": 1.2867, |
|
"step": 2514 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 3.75e-06, |
|
"loss": 1.2644, |
|
"step": 2516 |
|
}, |
|
{ |
|
"epoch": 18.46, |
|
"learning_rate": 3.713235294117647e-06, |
|
"loss": 1.2818, |
|
"step": 2518 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 3.6764705882352942e-06, |
|
"loss": 1.2902, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"learning_rate": 3.6397058823529417e-06, |
|
"loss": 1.2513, |
|
"step": 2522 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 3.602941176470588e-06, |
|
"loss": 1.2876, |
|
"step": 2524 |
|
}, |
|
{ |
|
"epoch": 18.51, |
|
"learning_rate": 3.5661764705882354e-06, |
|
"loss": 1.2579, |
|
"step": 2526 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 3.5294117647058825e-06, |
|
"loss": 1.2587, |
|
"step": 2528 |
|
}, |
|
{ |
|
"epoch": 18.54, |
|
"learning_rate": 3.4926470588235295e-06, |
|
"loss": 1.2613, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"learning_rate": 3.455882352941177e-06, |
|
"loss": 1.3092, |
|
"step": 2532 |
|
}, |
|
{ |
|
"epoch": 18.57, |
|
"learning_rate": 3.419117647058824e-06, |
|
"loss": 1.2454, |
|
"step": 2534 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"learning_rate": 3.3823529411764707e-06, |
|
"loss": 1.254, |
|
"step": 2536 |
|
}, |
|
{ |
|
"epoch": 18.6, |
|
"learning_rate": 3.3455882352941178e-06, |
|
"loss": 1.2796, |
|
"step": 2538 |
|
}, |
|
{ |
|
"epoch": 18.62, |
|
"learning_rate": 3.308823529411765e-06, |
|
"loss": 1.2328, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"learning_rate": 3.272058823529412e-06, |
|
"loss": 1.2987, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"learning_rate": 3.2352941176470594e-06, |
|
"loss": 1.2678, |
|
"step": 2544 |
|
}, |
|
{ |
|
"epoch": 18.66, |
|
"learning_rate": 3.1985294117647056e-06, |
|
"loss": 1.2742, |
|
"step": 2546 |
|
}, |
|
{ |
|
"epoch": 18.68, |
|
"learning_rate": 3.161764705882353e-06, |
|
"loss": 1.285, |
|
"step": 2548 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.2575, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 3.0882352941176472e-06, |
|
"loss": 1.2807, |
|
"step": 2552 |
|
}, |
|
{ |
|
"epoch": 18.72, |
|
"learning_rate": 3.0514705882352943e-06, |
|
"loss": 1.2584, |
|
"step": 2554 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"learning_rate": 3.0147058823529413e-06, |
|
"loss": 1.2162, |
|
"step": 2556 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 2.9779411764705884e-06, |
|
"loss": 1.2371, |
|
"step": 2558 |
|
}, |
|
{ |
|
"epoch": 18.76, |
|
"learning_rate": 2.9411764705882355e-06, |
|
"loss": 1.2401, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"learning_rate": 2.9044117647058825e-06, |
|
"loss": 1.2498, |
|
"step": 2562 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 2.8676470588235296e-06, |
|
"loss": 1.2554, |
|
"step": 2564 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"learning_rate": 2.8308823529411766e-06, |
|
"loss": 1.2683, |
|
"step": 2566 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"learning_rate": 2.7941176470588237e-06, |
|
"loss": 1.2478, |
|
"step": 2568 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 2.7573529411764708e-06, |
|
"loss": 1.2815, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"learning_rate": 2.720588235294118e-06, |
|
"loss": 1.2642, |
|
"step": 2572 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 2.6838235294117645e-06, |
|
"loss": 1.264, |
|
"step": 2574 |
|
}, |
|
{ |
|
"epoch": 18.88, |
|
"learning_rate": 2.647058823529412e-06, |
|
"loss": 1.2911, |
|
"step": 2576 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"learning_rate": 2.610294117647059e-06, |
|
"loss": 1.3437, |
|
"step": 2578 |
|
}, |
|
{ |
|
"epoch": 18.91, |
|
"learning_rate": 2.573529411764706e-06, |
|
"loss": 1.2873, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 2.536764705882353e-06, |
|
"loss": 1.3098, |
|
"step": 2582 |
|
}, |
|
{ |
|
"epoch": 18.94, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.2848, |
|
"step": 2584 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"learning_rate": 2.4632352941176473e-06, |
|
"loss": 1.26, |
|
"step": 2586 |
|
}, |
|
{ |
|
"epoch": 18.97, |
|
"learning_rate": 2.4264705882352943e-06, |
|
"loss": 1.2744, |
|
"step": 2588 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"learning_rate": 2.389705882352941e-06, |
|
"loss": 1.2806, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"learning_rate": 2.3529411764705885e-06, |
|
"loss": 1.2864, |
|
"step": 2592 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_gen_len": 744.2448559670781, |
|
"eval_loss": 1.4346004724502563, |
|
"eval_rouge1": 72.1334, |
|
"eval_rouge2": 40.9965, |
|
"eval_rougeL": 38.5682, |
|
"eval_rougeLsum": 69.5666, |
|
"eval_runtime": 1612.4207, |
|
"eval_samples_per_second": 0.603, |
|
"eval_steps_per_second": 0.076, |
|
"step": 2592 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 2.3161764705882355e-06, |
|
"loss": 1.2617, |
|
"step": 2594 |
|
}, |
|
{ |
|
"epoch": 19.03, |
|
"learning_rate": 2.279411764705882e-06, |
|
"loss": 1.2852, |
|
"step": 2596 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"learning_rate": 2.2426470588235296e-06, |
|
"loss": 1.2548, |
|
"step": 2598 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 2.2058823529411767e-06, |
|
"loss": 1.2467, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"learning_rate": 2.1691176470588238e-06, |
|
"loss": 1.2457, |
|
"step": 2602 |
|
}, |
|
{ |
|
"epoch": 19.09, |
|
"learning_rate": 2.132352941176471e-06, |
|
"loss": 1.2929, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"learning_rate": 2.0955882352941175e-06, |
|
"loss": 1.2768, |
|
"step": 2606 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 2.058823529411765e-06, |
|
"loss": 1.2755, |
|
"step": 2608 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 2.022058823529412e-06, |
|
"loss": 1.2621, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 19.14, |
|
"learning_rate": 1.9852941176470586e-06, |
|
"loss": 1.2802, |
|
"step": 2612 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"learning_rate": 1.948529411764706e-06, |
|
"loss": 1.2528, |
|
"step": 2614 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 1.911764705882353e-06, |
|
"loss": 1.2608, |
|
"step": 2616 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"learning_rate": 1.875e-06, |
|
"loss": 1.286, |
|
"step": 2618 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 1.8382352941176471e-06, |
|
"loss": 1.2803, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"learning_rate": 1.801470588235294e-06, |
|
"loss": 1.2816, |
|
"step": 2622 |
|
}, |
|
{ |
|
"epoch": 19.23, |
|
"learning_rate": 1.7647058823529412e-06, |
|
"loss": 1.2336, |
|
"step": 2624 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"learning_rate": 1.7279411764705885e-06, |
|
"loss": 1.278, |
|
"step": 2626 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"learning_rate": 1.6911764705882354e-06, |
|
"loss": 1.3079, |
|
"step": 2628 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"learning_rate": 1.6544117647058824e-06, |
|
"loss": 1.2659, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 19.29, |
|
"learning_rate": 1.6176470588235297e-06, |
|
"loss": 1.2686, |
|
"step": 2632 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"learning_rate": 1.5808823529411765e-06, |
|
"loss": 1.2069, |
|
"step": 2634 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"learning_rate": 1.5441176470588236e-06, |
|
"loss": 1.2774, |
|
"step": 2636 |
|
}, |
|
{ |
|
"epoch": 19.33, |
|
"learning_rate": 1.5073529411764707e-06, |
|
"loss": 1.248, |
|
"step": 2638 |
|
}, |
|
{ |
|
"epoch": 19.35, |
|
"learning_rate": 1.4705882352941177e-06, |
|
"loss": 1.2786, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"learning_rate": 1.4338235294117648e-06, |
|
"loss": 1.251, |
|
"step": 2642 |
|
}, |
|
{ |
|
"epoch": 19.38, |
|
"learning_rate": 1.3970588235294119e-06, |
|
"loss": 1.3055, |
|
"step": 2644 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 1.360294117647059e-06, |
|
"loss": 1.2598, |
|
"step": 2646 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"learning_rate": 1.323529411764706e-06, |
|
"loss": 1.2479, |
|
"step": 2648 |
|
}, |
|
{ |
|
"epoch": 19.42, |
|
"learning_rate": 1.286764705882353e-06, |
|
"loss": 1.2415, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.2595, |
|
"step": 2652 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"learning_rate": 1.2132352941176472e-06, |
|
"loss": 1.2443, |
|
"step": 2654 |
|
}, |
|
{ |
|
"epoch": 19.47, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 1.318, |
|
"step": 2656 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"learning_rate": 1.139705882352941e-06, |
|
"loss": 1.2591, |
|
"step": 2658 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 1.1029411764705884e-06, |
|
"loss": 1.3034, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 1.0661764705882354e-06, |
|
"loss": 1.2829, |
|
"step": 2662 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 1.0294117647058825e-06, |
|
"loss": 1.2802, |
|
"step": 2664 |
|
}, |
|
{ |
|
"epoch": 19.54, |
|
"learning_rate": 9.926470588235293e-07, |
|
"loss": 1.2556, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"learning_rate": 9.558823529411766e-07, |
|
"loss": 1.3106, |
|
"step": 2668 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 9.191176470588236e-07, |
|
"loss": 1.2525, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"learning_rate": 8.823529411764706e-07, |
|
"loss": 1.2987, |
|
"step": 2672 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 8.455882352941177e-07, |
|
"loss": 1.2641, |
|
"step": 2674 |
|
}, |
|
{ |
|
"epoch": 19.61, |
|
"learning_rate": 8.088235294117648e-07, |
|
"loss": 1.2668, |
|
"step": 2676 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 7.720588235294118e-07, |
|
"loss": 1.3005, |
|
"step": 2678 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"learning_rate": 7.352941176470589e-07, |
|
"loss": 1.282, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"learning_rate": 6.985294117647059e-07, |
|
"loss": 1.2671, |
|
"step": 2682 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"learning_rate": 6.61764705882353e-07, |
|
"loss": 1.2342, |
|
"step": 2684 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.2571, |
|
"step": 2686 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"learning_rate": 5.882352941176471e-07, |
|
"loss": 1.3025, |
|
"step": 2688 |
|
}, |
|
{ |
|
"epoch": 19.72, |
|
"learning_rate": 5.514705882352942e-07, |
|
"loss": 1.2558, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"learning_rate": 5.147058823529412e-07, |
|
"loss": 1.2856, |
|
"step": 2692 |
|
}, |
|
{ |
|
"epoch": 19.75, |
|
"learning_rate": 4.779411764705883e-07, |
|
"loss": 1.2436, |
|
"step": 2694 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"learning_rate": 4.411764705882353e-07, |
|
"loss": 1.2663, |
|
"step": 2696 |
|
}, |
|
{ |
|
"epoch": 19.77, |
|
"learning_rate": 4.044117647058824e-07, |
|
"loss": 1.251, |
|
"step": 2698 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 3.6764705882352943e-07, |
|
"loss": 1.2936, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 3.308823529411765e-07, |
|
"loss": 1.2658, |
|
"step": 2702 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 2.9411764705882356e-07, |
|
"loss": 1.2671, |
|
"step": 2704 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"learning_rate": 2.573529411764706e-07, |
|
"loss": 1.272, |
|
"step": 2706 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"learning_rate": 2.2058823529411765e-07, |
|
"loss": 1.2926, |
|
"step": 2708 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 1.8382352941176472e-07, |
|
"loss": 1.2426, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"learning_rate": 1.4705882352941178e-07, |
|
"loss": 1.301, |
|
"step": 2712 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"learning_rate": 1.1029411764705883e-07, |
|
"loss": 1.324, |
|
"step": 2714 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"learning_rate": 7.352941176470589e-08, |
|
"loss": 1.3007, |
|
"step": 2716 |
|
}, |
|
{ |
|
"epoch": 19.92, |
|
"learning_rate": 3.6764705882352945e-08, |
|
"loss": 1.29, |
|
"step": 2718 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"learning_rate": 0.0, |
|
"loss": 1.2936, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"eval_gen_len": 744.4588477366256, |
|
"eval_loss": 1.4350636005401611, |
|
"eval_rouge1": 72.0397, |
|
"eval_rouge2": 40.9431, |
|
"eval_rougeL": 38.4161, |
|
"eval_rougeLsum": 69.5028, |
|
"eval_runtime": 1636.79, |
|
"eval_samples_per_second": 0.594, |
|
"eval_steps_per_second": 0.075, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"step": 2720, |
|
"total_flos": 7.21120699038892e+17, |
|
"train_loss": 1.4114765528370352, |
|
"train_runtime": 122523.8715, |
|
"train_samples_per_second": 2.85, |
|
"train_steps_per_second": 0.022 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 2720, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 500, |
|
"total_flos": 7.21120699038892e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|