|
{
  "best_metric": 0.9316640584246219,
  "best_model_checkpoint": "SL-CvT/checkpoint-5040",
  "epoch": 100.0,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 1.4351, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 1.4316, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.3634, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.3129, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 1.2931, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5e-06, |
|
"loss": 1.2379, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.7245696400625978, |
|
"eval_f1": 0.6421567406482025, |
|
"eval_loss": 1.0716278553009033, |
|
"eval_roc_auc": 0.7323469324820772, |
|
"eval_runtime": 30.9342, |
|
"eval_samples_per_second": 61.97, |
|
"eval_steps_per_second": 1.94, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 1.2069, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.1671, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.1401, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 1.1452, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 1.0799, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1e-05, |
|
"loss": 1.0186, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.729264475743349, |
|
"eval_f1": 0.6425490906446382, |
|
"eval_loss": 0.847660481929779, |
|
"eval_roc_auc": 0.7879043691585872, |
|
"eval_runtime": 30.8873, |
|
"eval_samples_per_second": 62.064, |
|
"eval_steps_per_second": 1.943, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.0833333333333334e-05, |
|
"loss": 1.0281, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1666666666666668e-05, |
|
"loss": 0.9652, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.9686, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.9573, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.4166666666666668e-05, |
|
"loss": 0.9334, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.5e-05, |
|
"loss": 0.9433, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.7537819509650495, |
|
"eval_f1": 0.705960518775639, |
|
"eval_loss": 0.7472825050354004, |
|
"eval_roc_auc": 0.845372294497077, |
|
"eval_runtime": 31.0251, |
|
"eval_samples_per_second": 61.789, |
|
"eval_steps_per_second": 1.934, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.5833333333333333e-05, |
|
"loss": 0.935, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.8971, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 1.75e-05, |
|
"loss": 0.8269, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.8333333333333333e-05, |
|
"loss": 0.9014, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.9166666666666667e-05, |
|
"loss": 0.8513, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2e-05, |
|
"loss": 0.8644, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.766301512780386, |
|
"eval_f1": 0.7188479081954189, |
|
"eval_loss": 0.6830809712409973, |
|
"eval_roc_auc": 0.8696485953997086, |
|
"eval_runtime": 30.9674, |
|
"eval_samples_per_second": 61.904, |
|
"eval_steps_per_second": 1.938, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.0833333333333336e-05, |
|
"loss": 0.8384, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.1666666666666667e-05, |
|
"loss": 0.8517, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.25e-05, |
|
"loss": 0.7973, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 0.769, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.4166666666666667e-05, |
|
"loss": 0.8245, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.7985, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.7798643714136672, |
|
"eval_f1": 0.7409260554438973, |
|
"eval_loss": 0.641985297203064, |
|
"eval_roc_auc": 0.8943028148426463, |
|
"eval_runtime": 30.9292, |
|
"eval_samples_per_second": 61.98, |
|
"eval_steps_per_second": 1.94, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.5833333333333336e-05, |
|
"loss": 0.7784, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.8006, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 0.7302, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 2.8333333333333335e-05, |
|
"loss": 0.7941, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.916666666666667e-05, |
|
"loss": 0.7924, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3e-05, |
|
"loss": 0.7322, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.8101199791340636, |
|
"eval_f1": 0.7885994269404484, |
|
"eval_loss": 0.5712574124336243, |
|
"eval_roc_auc": 0.9195657365645341, |
|
"eval_runtime": 30.9327, |
|
"eval_samples_per_second": 61.973, |
|
"eval_steps_per_second": 1.94, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.0833333333333335e-05, |
|
"loss": 0.7336, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 3.1666666666666666e-05, |
|
"loss": 0.663, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.2500000000000004e-05, |
|
"loss": 0.6591, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.7874, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.4166666666666666e-05, |
|
"loss": 0.6922, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 3.5e-05, |
|
"loss": 0.725, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.8189880020865936, |
|
"eval_f1": 0.7989427228016346, |
|
"eval_loss": 0.5310792922973633, |
|
"eval_roc_auc": 0.9324356204838778, |
|
"eval_runtime": 30.9897, |
|
"eval_samples_per_second": 61.859, |
|
"eval_steps_per_second": 1.936, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.5833333333333335e-05, |
|
"loss": 0.6874, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 0.6595, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.6524, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 3.8333333333333334e-05, |
|
"loss": 0.6605, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 3.9166666666666665e-05, |
|
"loss": 0.7416, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 4e-05, |
|
"loss": 0.6529, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.8116849243609807, |
|
"eval_f1": 0.7851628741512076, |
|
"eval_loss": 0.5246216654777527, |
|
"eval_roc_auc": 0.9404167887041862, |
|
"eval_runtime": 30.9255, |
|
"eval_samples_per_second": 61.988, |
|
"eval_steps_per_second": 1.94, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 4.0833333333333334e-05, |
|
"loss": 0.6527, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 0.6512, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 4.25e-05, |
|
"loss": 0.6601, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 0.6674, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 4.4166666666666665e-05, |
|
"loss": 0.6243, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 4.5e-05, |
|
"loss": 0.6224, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.8440271257172666, |
|
"eval_f1": 0.8282011911330186, |
|
"eval_loss": 0.4597983658313751, |
|
"eval_roc_auc": 0.9516750726473929, |
|
"eval_runtime": 30.9543, |
|
"eval_samples_per_second": 61.93, |
|
"eval_steps_per_second": 1.938, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 4.5833333333333334e-05, |
|
"loss": 0.6133, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 0.6199, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 4.75e-05, |
|
"loss": 0.6523, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 4.8333333333333334e-05, |
|
"loss": 0.6189, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 4.9166666666666665e-05, |
|
"loss": 0.6186, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 5e-05, |
|
"loss": 0.6315, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.8528951486697965, |
|
"eval_f1": 0.8456632918948623, |
|
"eval_loss": 0.4362500011920929, |
|
"eval_roc_auc": 0.9584596745900972, |
|
"eval_runtime": 30.9898, |
|
"eval_samples_per_second": 61.859, |
|
"eval_steps_per_second": 1.936, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 4.9907407407407406e-05, |
|
"loss": 0.596, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 4.981481481481482e-05, |
|
"loss": 0.5767, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 4.972222222222223e-05, |
|
"loss": 0.5942, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 4.962962962962963e-05, |
|
"loss": 0.5511, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 4.9537037037037035e-05, |
|
"loss": 0.5505, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 4.9444444444444446e-05, |
|
"loss": 0.5651, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_accuracy": 0.8502869066249348, |
|
"eval_f1": 0.8323302022733028, |
|
"eval_loss": 0.44371965527534485, |
|
"eval_roc_auc": 0.9564279884814689, |
|
"eval_runtime": 30.9158, |
|
"eval_samples_per_second": 62.007, |
|
"eval_steps_per_second": 1.941, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 11.17, |
|
"learning_rate": 4.935185185185186e-05, |
|
"loss": 0.576, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 4.925925925925926e-05, |
|
"loss": 0.5669, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 4.9166666666666665e-05, |
|
"loss": 0.5954, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 4.9074074074074075e-05, |
|
"loss": 0.5232, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 4.8981481481481486e-05, |
|
"loss": 0.5216, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 4.888888888888889e-05, |
|
"loss": 0.574, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.863849765258216, |
|
"eval_f1": 0.8530973509302869, |
|
"eval_loss": 0.40030428767204285, |
|
"eval_roc_auc": 0.9616688906216743, |
|
"eval_runtime": 30.8921, |
|
"eval_samples_per_second": 62.055, |
|
"eval_steps_per_second": 1.942, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 12.17, |
|
"learning_rate": 4.87962962962963e-05, |
|
"loss": 0.5462, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"learning_rate": 4.8703703703703704e-05, |
|
"loss": 0.5626, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 4.8611111111111115e-05, |
|
"loss": 0.535, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"learning_rate": 4.851851851851852e-05, |
|
"loss": 0.5251, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 4.842592592592593e-05, |
|
"loss": 0.5507, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 4.8333333333333334e-05, |
|
"loss": 0.5269, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.8721961398017736, |
|
"eval_f1": 0.8675936123138328, |
|
"eval_loss": 0.3901008367538452, |
|
"eval_roc_auc": 0.967124620364712, |
|
"eval_runtime": 31.0242, |
|
"eval_samples_per_second": 61.79, |
|
"eval_steps_per_second": 1.934, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 4.8240740740740744e-05, |
|
"loss": 0.5424, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 4.814814814814815e-05, |
|
"loss": 0.5145, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 13.5, |
|
"learning_rate": 4.805555555555556e-05, |
|
"loss": 0.5088, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 4.796296296296296e-05, |
|
"loss": 0.524, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 13.83, |
|
"learning_rate": 4.7870370370370373e-05, |
|
"loss": 0.5402, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 4.7777777777777784e-05, |
|
"loss": 0.5138, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.8732394366197183, |
|
"eval_f1": 0.8607439141106847, |
|
"eval_loss": 0.39837101101875305, |
|
"eval_roc_auc": 0.9685311128127906, |
|
"eval_runtime": 30.8559, |
|
"eval_samples_per_second": 62.128, |
|
"eval_steps_per_second": 1.945, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 4.768518518518519e-05, |
|
"loss": 0.4656, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 4.759259259259259e-05, |
|
"loss": 0.5275, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"learning_rate": 4.75e-05, |
|
"loss": 0.5204, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"learning_rate": 4.740740740740741e-05, |
|
"loss": 0.5162, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"learning_rate": 4.731481481481482e-05, |
|
"loss": 0.4958, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 4.722222222222222e-05, |
|
"loss": 0.4839, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.8768909754825248, |
|
"eval_f1": 0.8682586053503393, |
|
"eval_loss": 0.376302033662796, |
|
"eval_roc_auc": 0.9701314143362457, |
|
"eval_runtime": 31.1252, |
|
"eval_samples_per_second": 61.59, |
|
"eval_steps_per_second": 1.928, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"learning_rate": 4.712962962962963e-05, |
|
"loss": 0.4959, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"learning_rate": 4.703703703703704e-05, |
|
"loss": 0.4924, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 4.6944444444444446e-05, |
|
"loss": 0.5356, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"learning_rate": 4.685185185185185e-05, |
|
"loss": 0.5063, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 4.675925925925926e-05, |
|
"loss": 0.4696, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 0.463, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.8894105372978612, |
|
"eval_f1": 0.8836985599474323, |
|
"eval_loss": 0.3397548496723175, |
|
"eval_roc_auc": 0.9718033457050373, |
|
"eval_runtime": 30.8757, |
|
"eval_samples_per_second": 62.088, |
|
"eval_steps_per_second": 1.943, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 4.6574074074074076e-05, |
|
"loss": 0.4963, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 4.648148148148148e-05, |
|
"loss": 0.4818, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 4.638888888888889e-05, |
|
"loss": 0.4686, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 4.62962962962963e-05, |
|
"loss": 0.4525, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 16.83, |
|
"learning_rate": 4.6203703703703705e-05, |
|
"loss": 0.4735, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"learning_rate": 4.6111111111111115e-05, |
|
"loss": 0.4767, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.8914971309337506, |
|
"eval_f1": 0.8845954057184805, |
|
"eval_loss": 0.3293467164039612, |
|
"eval_roc_auc": 0.9738290344332811, |
|
"eval_runtime": 30.9209, |
|
"eval_samples_per_second": 61.997, |
|
"eval_steps_per_second": 1.94, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 17.17, |
|
"learning_rate": 4.601851851851852e-05, |
|
"loss": 0.4284, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 4.592592592592593e-05, |
|
"loss": 0.4957, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 4.5833333333333334e-05, |
|
"loss": 0.4756, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"learning_rate": 4.5740740740740745e-05, |
|
"loss": 0.4693, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"learning_rate": 4.564814814814815e-05, |
|
"loss": 0.4691, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 4.555555555555556e-05, |
|
"loss": 0.4985, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy": 0.8862806468440271, |
|
"eval_f1": 0.8852238967055369, |
|
"eval_loss": 0.33499640226364136, |
|
"eval_roc_auc": 0.9763127743838852, |
|
"eval_runtime": 31.1019, |
|
"eval_samples_per_second": 61.636, |
|
"eval_steps_per_second": 1.929, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 18.17, |
|
"learning_rate": 4.546296296296296e-05, |
|
"loss": 0.47, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 4.5370370370370374e-05, |
|
"loss": 0.4907, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 4.527777777777778e-05, |
|
"loss": 0.44, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 4.518518518518519e-05, |
|
"loss": 0.4573, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 18.83, |
|
"learning_rate": 4.50925925925926e-05, |
|
"loss": 0.4056, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"learning_rate": 4.5e-05, |
|
"loss": 0.4657, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_accuracy": 0.8951486697965572, |
|
"eval_f1": 0.8871889780996989, |
|
"eval_loss": 0.33693990111351013, |
|
"eval_roc_auc": 0.9746493362037599, |
|
"eval_runtime": 30.9425, |
|
"eval_samples_per_second": 61.954, |
|
"eval_steps_per_second": 1.939, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 4.490740740740741e-05, |
|
"loss": 0.4504, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 19.33, |
|
"learning_rate": 4.481481481481482e-05, |
|
"loss": 0.4723, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 4.472222222222223e-05, |
|
"loss": 0.4148, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"learning_rate": 4.462962962962963e-05, |
|
"loss": 0.4446, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"learning_rate": 4.4537037037037036e-05, |
|
"loss": 0.4566, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 0.4514, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy": 0.8925404277516954, |
|
"eval_f1": 0.8880165267578498, |
|
"eval_loss": 0.32129156589508057, |
|
"eval_roc_auc": 0.9749826486859693, |
|
"eval_runtime": 30.9956, |
|
"eval_samples_per_second": 61.847, |
|
"eval_steps_per_second": 1.936, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 20.17, |
|
"learning_rate": 4.435185185185186e-05, |
|
"loss": 0.4075, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"learning_rate": 4.425925925925926e-05, |
|
"loss": 0.4775, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"learning_rate": 4.4166666666666665e-05, |
|
"loss": 0.4486, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"learning_rate": 4.4074074074074076e-05, |
|
"loss": 0.4464, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 20.83, |
|
"learning_rate": 4.3981481481481486e-05, |
|
"loss": 0.4287, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"learning_rate": 4.388888888888889e-05, |
|
"loss": 0.4207, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"eval_accuracy": 0.8977569118414189, |
|
"eval_f1": 0.8943328478428479, |
|
"eval_loss": 0.31750577688217163, |
|
"eval_roc_auc": 0.9770830922893768, |
|
"eval_runtime": 30.8486, |
|
"eval_samples_per_second": 62.142, |
|
"eval_steps_per_second": 1.945, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"learning_rate": 4.3796296296296294e-05, |
|
"loss": 0.4354, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 21.33, |
|
"learning_rate": 4.3703703703703705e-05, |
|
"loss": 0.4387, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"learning_rate": 4.3611111111111116e-05, |
|
"loss": 0.4229, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 21.67, |
|
"learning_rate": 4.351851851851852e-05, |
|
"loss": 0.4408, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 21.83, |
|
"learning_rate": 4.342592592592592e-05, |
|
"loss": 0.4114, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 0.4522, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_accuracy": 0.8982785602503912, |
|
"eval_f1": 0.89697139555418, |
|
"eval_loss": 0.3228977620601654, |
|
"eval_roc_auc": 0.9766625249534777, |
|
"eval_runtime": 30.8578, |
|
"eval_samples_per_second": 62.124, |
|
"eval_steps_per_second": 1.944, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 22.17, |
|
"learning_rate": 4.3240740740740745e-05, |
|
"loss": 0.3922, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 22.33, |
|
"learning_rate": 4.314814814814815e-05, |
|
"loss": 0.3809, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 22.5, |
|
"learning_rate": 4.305555555555556e-05, |
|
"loss": 0.4079, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"learning_rate": 4.296296296296296e-05, |
|
"loss": 0.412, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 22.83, |
|
"learning_rate": 4.2870370370370374e-05, |
|
"loss": 0.3794, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"learning_rate": 4.277777777777778e-05, |
|
"loss": 0.4328, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"eval_accuracy": 0.8977569118414189, |
|
"eval_f1": 0.8947876464589353, |
|
"eval_loss": 0.31209608912467957, |
|
"eval_roc_auc": 0.979110772862328, |
|
"eval_runtime": 31.0256, |
|
"eval_samples_per_second": 61.788, |
|
"eval_steps_per_second": 1.934, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 23.17, |
|
"learning_rate": 4.268518518518519e-05, |
|
"loss": 0.3926, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 23.33, |
|
"learning_rate": 4.259259259259259e-05, |
|
"loss": 0.4003, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 23.5, |
|
"learning_rate": 4.25e-05, |
|
"loss": 0.375, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 23.67, |
|
"learning_rate": 4.240740740740741e-05, |
|
"loss": 0.3676, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 23.83, |
|
"learning_rate": 4.231481481481482e-05, |
|
"loss": 0.4068, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"learning_rate": 4.222222222222222e-05, |
|
"loss": 0.3942, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_accuracy": 0.9029733959311425, |
|
"eval_f1": 0.8992700731616186, |
|
"eval_loss": 0.3111121356487274, |
|
"eval_roc_auc": 0.9764683636725175, |
|
"eval_runtime": 30.8717, |
|
"eval_samples_per_second": 62.096, |
|
"eval_steps_per_second": 1.944, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 24.17, |
|
"learning_rate": 4.212962962962963e-05, |
|
"loss": 0.3966, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 24.33, |
|
"learning_rate": 4.203703703703704e-05, |
|
"loss": 0.3767, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 24.5, |
|
"learning_rate": 4.194444444444445e-05, |
|
"loss": 0.4047, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 24.67, |
|
"learning_rate": 4.185185185185185e-05, |
|
"loss": 0.3884, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 24.83, |
|
"learning_rate": 4.175925925925926e-05, |
|
"loss": 0.3499, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 0.4414, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"eval_accuracy": 0.9061032863849765, |
|
"eval_f1": 0.9031554688660446, |
|
"eval_loss": 0.30617934465408325, |
|
"eval_roc_auc": 0.9762987886943956, |
|
"eval_runtime": 31.0067, |
|
"eval_samples_per_second": 61.825, |
|
"eval_steps_per_second": 1.935, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 25.17, |
|
"learning_rate": 4.1574074074074076e-05, |
|
"loss": 0.4037, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 25.33, |
|
"learning_rate": 4.148148148148148e-05, |
|
"loss": 0.4282, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 25.5, |
|
"learning_rate": 4.138888888888889e-05, |
|
"loss": 0.4035, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 25.67, |
|
"learning_rate": 4.12962962962963e-05, |
|
"loss": 0.366, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 25.83, |
|
"learning_rate": 4.1203703703703705e-05, |
|
"loss": 0.3912, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"learning_rate": 4.111111111111111e-05, |
|
"loss": 0.3608, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_accuracy": 0.9014084507042254, |
|
"eval_f1": 0.8996931816013317, |
|
"eval_loss": 0.309887558221817, |
|
"eval_roc_auc": 0.9786590060720087, |
|
"eval_runtime": 30.8321, |
|
"eval_samples_per_second": 62.175, |
|
"eval_steps_per_second": 1.946, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 26.17, |
|
"learning_rate": 4.101851851851852e-05, |
|
"loss": 0.3749, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 26.33, |
|
"learning_rate": 4.092592592592593e-05, |
|
"loss": 0.3948, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 26.5, |
|
"learning_rate": 4.0833333333333334e-05, |
|
"loss": 0.3999, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 26.67, |
|
"learning_rate": 4.074074074074074e-05, |
|
"loss": 0.3949, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 26.83, |
|
"learning_rate": 4.064814814814815e-05, |
|
"loss": 0.4086, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"learning_rate": 4.055555555555556e-05, |
|
"loss": 0.3729, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"eval_accuracy": 0.908189880020866, |
|
"eval_f1": 0.9029396025123122, |
|
"eval_loss": 0.305014431476593, |
|
"eval_roc_auc": 0.9782605273316775, |
|
"eval_runtime": 30.761, |
|
"eval_samples_per_second": 62.319, |
|
"eval_steps_per_second": 1.951, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 27.17, |
|
"learning_rate": 4.0462962962962963e-05, |
|
"loss": 0.4176, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 27.33, |
|
"learning_rate": 4.0370370370370374e-05, |
|
"loss": 0.4117, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 27.5, |
|
"learning_rate": 4.027777777777778e-05, |
|
"loss": 0.3567, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 27.67, |
|
"learning_rate": 4.018518518518519e-05, |
|
"loss": 0.3693, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 27.83, |
|
"learning_rate": 4.009259259259259e-05, |
|
"loss": 0.3566, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"learning_rate": 4e-05, |
|
"loss": 0.393, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_accuracy": 0.9107981220657277, |
|
"eval_f1": 0.9089869246713798, |
|
"eval_loss": 0.2970489263534546, |
|
"eval_roc_auc": 0.9796921899115975, |
|
"eval_runtime": 31.0331, |
|
"eval_samples_per_second": 61.773, |
|
"eval_steps_per_second": 1.933, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 28.17, |
|
"learning_rate": 3.990740740740741e-05, |
|
"loss": 0.3726, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 28.33, |
|
"learning_rate": 3.981481481481482e-05, |
|
"loss": 0.3762, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 28.5, |
|
"learning_rate": 3.972222222222222e-05, |
|
"loss": 0.4289, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 28.67, |
|
"learning_rate": 3.962962962962963e-05, |
|
"loss": 0.3754, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 28.83, |
|
"learning_rate": 3.9537037037037036e-05, |
|
"loss": 0.3363, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"learning_rate": 3.944444444444445e-05, |
|
"loss": 0.402, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"eval_accuracy": 0.9113197704747, |
|
"eval_f1": 0.9087183876436168, |
|
"eval_loss": 0.2985842525959015, |
|
"eval_roc_auc": 0.9793433765347666, |
|
"eval_runtime": 30.969, |
|
"eval_samples_per_second": 61.901, |
|
"eval_steps_per_second": 1.937, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 29.17, |
|
"learning_rate": 3.935185185185186e-05, |
|
"loss": 0.3481, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 29.33, |
|
"learning_rate": 3.925925925925926e-05, |
|
"loss": 0.4261, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 29.5, |
|
"learning_rate": 3.9166666666666665e-05, |
|
"loss": 0.3484, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 29.67, |
|
"learning_rate": 3.9074074074074076e-05, |
|
"loss": 0.3612, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 29.83, |
|
"learning_rate": 3.898148148148149e-05, |
|
"loss": 0.3758, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 0.3697, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"eval_accuracy": 0.9024517475221701, |
|
"eval_f1": 0.8967746683628594, |
|
"eval_loss": 0.3384450078010559, |
|
"eval_roc_auc": 0.9769238753852608, |
|
"eval_runtime": 30.8671, |
|
"eval_samples_per_second": 62.105, |
|
"eval_steps_per_second": 1.944, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 30.17, |
|
"learning_rate": 3.8796296296296295e-05, |
|
"loss": 0.3331, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 30.33, |
|
"learning_rate": 3.8703703703703705e-05, |
|
"loss": 0.399, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 30.5, |
|
"learning_rate": 3.8611111111111116e-05, |
|
"loss": 0.381, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 30.67, |
|
"learning_rate": 3.851851851851852e-05, |
|
"loss": 0.3682, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 30.83, |
|
"learning_rate": 3.8425925925925924e-05, |
|
"loss": 0.3702, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"learning_rate": 3.8333333333333334e-05, |
|
"loss": 0.3502, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"eval_accuracy": 0.9102764736567553, |
|
"eval_f1": 0.9058244823258932, |
|
"eval_loss": 0.3035435378551483, |
|
"eval_roc_auc": 0.978857810039505, |
|
"eval_runtime": 30.8727, |
|
"eval_samples_per_second": 62.094, |
|
"eval_steps_per_second": 1.943, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 31.17, |
|
"learning_rate": 3.8240740740740745e-05, |
|
"loss": 0.3545, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 31.33, |
|
"learning_rate": 3.814814814814815e-05, |
|
"loss": 0.3626, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 31.5, |
|
"learning_rate": 3.805555555555555e-05, |
|
"loss": 0.3393, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 31.67, |
|
"learning_rate": 3.7962962962962964e-05, |
|
"loss": 0.3279, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 31.83, |
|
"learning_rate": 3.7870370370370374e-05, |
|
"loss": 0.3332, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"learning_rate": 3.777777777777778e-05, |
|
"loss": 0.3653, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"eval_accuracy": 0.9024517475221701, |
|
"eval_f1": 0.9023846727475806, |
|
"eval_loss": 0.3126789629459381, |
|
"eval_roc_auc": 0.9787706292725965, |
|
"eval_runtime": 30.8857, |
|
"eval_samples_per_second": 62.068, |
|
"eval_steps_per_second": 1.943, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 32.17, |
|
"learning_rate": 3.768518518518518e-05, |
|
"loss": 0.3472, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 32.33, |
|
"learning_rate": 3.759259259259259e-05, |
|
"loss": 0.3398, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 32.5, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.3706, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 32.67, |
|
"learning_rate": 3.740740740740741e-05, |
|
"loss": 0.3413, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 32.83, |
|
"learning_rate": 3.731481481481482e-05, |
|
"loss": 0.3371, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"learning_rate": 3.722222222222222e-05, |
|
"loss": 0.3898, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"eval_accuracy": 0.9061032863849765, |
|
"eval_f1": 0.9050367858097491, |
|
"eval_loss": 0.32219406962394714, |
|
"eval_roc_auc": 0.9778496116752868, |
|
"eval_runtime": 31.0225, |
|
"eval_samples_per_second": 61.794, |
|
"eval_steps_per_second": 1.934, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 33.17, |
|
"learning_rate": 3.712962962962963e-05, |
|
"loss": 0.3495, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 0.3505, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 33.5, |
|
"learning_rate": 3.694444444444445e-05, |
|
"loss": 0.3248, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 33.67, |
|
"learning_rate": 3.685185185185185e-05, |
|
"loss": 0.323, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 33.83, |
|
"learning_rate": 3.675925925925926e-05, |
|
"loss": 0.3586, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 0.317, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"eval_accuracy": 0.9139280125195618, |
|
"eval_f1": 0.9123929576037129, |
|
"eval_loss": 0.30128011107444763, |
|
"eval_roc_auc": 0.9798385920819667, |
|
"eval_runtime": 31.1045, |
|
"eval_samples_per_second": 61.631, |
|
"eval_steps_per_second": 1.929, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 34.17, |
|
"learning_rate": 3.6574074074074076e-05, |
|
"loss": 0.3429, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 34.33, |
|
"learning_rate": 3.648148148148148e-05, |
|
"loss": 0.3753, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 34.5, |
|
"learning_rate": 3.638888888888889e-05, |
|
"loss": 0.3698, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 34.67, |
|
"learning_rate": 3.62962962962963e-05, |
|
"loss": 0.3595, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 34.83, |
|
"learning_rate": 3.6203703703703706e-05, |
|
"loss": 0.3423, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"learning_rate": 3.611111111111111e-05, |
|
"loss": 0.3166, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"eval_accuracy": 0.9134063641105895, |
|
"eval_f1": 0.9095292266973257, |
|
"eval_loss": 0.3185039162635803, |
|
"eval_roc_auc": 0.9775317911265304, |
|
"eval_runtime": 31.1126, |
|
"eval_samples_per_second": 61.615, |
|
"eval_steps_per_second": 1.928, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 35.17, |
|
"learning_rate": 3.601851851851852e-05, |
|
"loss": 0.3309, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 35.33, |
|
"learning_rate": 3.592592592592593e-05, |
|
"loss": 0.3442, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 35.5, |
|
"learning_rate": 3.5833333333333335e-05, |
|
"loss": 0.3435, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 35.67, |
|
"learning_rate": 3.574074074074074e-05, |
|
"loss": 0.3533, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 35.83, |
|
"learning_rate": 3.564814814814815e-05, |
|
"loss": 0.3834, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"learning_rate": 3.555555555555556e-05, |
|
"loss": 0.3771, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"eval_accuracy": 0.9066249347939489, |
|
"eval_f1": 0.9048923351938147, |
|
"eval_loss": 0.3066847622394562, |
|
"eval_roc_auc": 0.9782356278578076, |
|
"eval_runtime": 31.3435, |
|
"eval_samples_per_second": 61.161, |
|
"eval_steps_per_second": 1.914, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 36.17, |
|
"learning_rate": 3.5462962962962964e-05, |
|
"loss": 0.3224, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 36.33, |
|
"learning_rate": 3.537037037037037e-05, |
|
"loss": 0.3377, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 36.5, |
|
"learning_rate": 3.527777777777778e-05, |
|
"loss": 0.3458, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 36.67, |
|
"learning_rate": 3.518518518518519e-05, |
|
"loss": 0.3623, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 36.83, |
|
"learning_rate": 3.509259259259259e-05, |
|
"loss": 0.3362, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"learning_rate": 3.5e-05, |
|
"loss": 0.3487, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"eval_accuracy": 0.9134063641105895, |
|
"eval_f1": 0.9117579891305719, |
|
"eval_loss": 0.2947899401187897, |
|
"eval_roc_auc": 0.980099185348387, |
|
"eval_runtime": 31.0549, |
|
"eval_samples_per_second": 61.729, |
|
"eval_steps_per_second": 1.932, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 37.17, |
|
"learning_rate": 3.490740740740741e-05, |
|
"loss": 0.2907, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 37.33, |
|
"learning_rate": 3.481481481481482e-05, |
|
"loss": 0.3329, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 37.5, |
|
"learning_rate": 3.472222222222222e-05, |
|
"loss": 0.3353, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 37.67, |
|
"learning_rate": 3.4629629629629626e-05, |
|
"loss": 0.3426, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 37.83, |
|
"learning_rate": 3.453703703703704e-05, |
|
"loss": 0.3018, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"learning_rate": 3.444444444444445e-05, |
|
"loss": 0.3202, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"eval_accuracy": 0.918622848200313, |
|
"eval_f1": 0.9167721477242283, |
|
"eval_loss": 0.29156479239463806, |
|
"eval_roc_auc": 0.978799051349497, |
|
"eval_runtime": 30.9244, |
|
"eval_samples_per_second": 61.99, |
|
"eval_steps_per_second": 1.94, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 38.17, |
|
"learning_rate": 3.435185185185185e-05, |
|
"loss": 0.3297, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 38.33, |
|
"learning_rate": 3.425925925925926e-05, |
|
"loss": 0.3051, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 38.5, |
|
"learning_rate": 3.4166666666666666e-05, |
|
"loss": 0.315, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 38.67, |
|
"learning_rate": 3.4074074074074077e-05, |
|
"loss": 0.3474, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 38.83, |
|
"learning_rate": 3.398148148148148e-05, |
|
"loss": 0.3566, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"learning_rate": 3.388888888888889e-05, |
|
"loss": 0.3163, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"eval_accuracy": 0.9154929577464789, |
|
"eval_f1": 0.9141142757105575, |
|
"eval_loss": 0.3148981034755707, |
|
"eval_roc_auc": 0.9776534076077666, |
|
"eval_runtime": 30.9687, |
|
"eval_samples_per_second": 61.901, |
|
"eval_steps_per_second": 1.937, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 39.17, |
|
"learning_rate": 3.3796296296296295e-05, |
|
"loss": 0.3534, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 39.33, |
|
"learning_rate": 3.3703703703703706e-05, |
|
"loss": 0.3158, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 39.5, |
|
"learning_rate": 3.3611111111111116e-05, |
|
"loss": 0.3126, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 39.67, |
|
"learning_rate": 3.351851851851852e-05, |
|
"loss": 0.3331, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 39.83, |
|
"learning_rate": 3.3425925925925924e-05, |
|
"loss": 0.2788, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.3605, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"eval_accuracy": 0.9207094418362024, |
|
"eval_f1": 0.919181121459014, |
|
"eval_loss": 0.2963573634624481, |
|
"eval_roc_auc": 0.9796925781936262, |
|
"eval_runtime": 31.0457, |
|
"eval_samples_per_second": 61.748, |
|
"eval_steps_per_second": 1.933, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 40.17, |
|
"learning_rate": 3.3240740740740746e-05, |
|
"loss": 0.318, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 40.33, |
|
"learning_rate": 3.314814814814815e-05, |
|
"loss": 0.3283, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 40.5, |
|
"learning_rate": 3.3055555555555553e-05, |
|
"loss": 0.3141, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 40.67, |
|
"learning_rate": 3.2962962962962964e-05, |
|
"loss": 0.3245, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 40.83, |
|
"learning_rate": 3.2870370370370375e-05, |
|
"loss": 0.3381, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 41.0, |
|
"learning_rate": 3.277777777777778e-05, |
|
"loss": 0.3636, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 41.0, |
|
"eval_accuracy": 0.9134063641105895, |
|
"eval_f1": 0.9111347359382455, |
|
"eval_loss": 0.3142088055610657, |
|
"eval_roc_auc": 0.9809569663590326, |
|
"eval_runtime": 30.9606, |
|
"eval_samples_per_second": 61.917, |
|
"eval_steps_per_second": 1.938, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 41.17, |
|
"learning_rate": 3.268518518518518e-05, |
|
"loss": 0.3145, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 41.33, |
|
"learning_rate": 3.25925925925926e-05, |
|
"loss": 0.3277, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 41.5, |
|
"learning_rate": 3.2500000000000004e-05, |
|
"loss": 0.3806, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 41.67, |
|
"learning_rate": 3.240740740740741e-05, |
|
"loss": 0.3386, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 41.83, |
|
"learning_rate": 3.231481481481481e-05, |
|
"loss": 0.3237, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 42.0, |
|
"learning_rate": 3.222222222222223e-05, |
|
"loss": 0.3454, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 42.0, |
|
"eval_accuracy": 0.9113197704747, |
|
"eval_f1": 0.9111205125376055, |
|
"eval_loss": 0.31330010294914246, |
|
"eval_roc_auc": 0.9791633758091489, |
|
"eval_runtime": 31.2016, |
|
"eval_samples_per_second": 61.439, |
|
"eval_steps_per_second": 1.923, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 42.17, |
|
"learning_rate": 3.212962962962963e-05, |
|
"loss": 0.3132, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 42.33, |
|
"learning_rate": 3.203703703703704e-05, |
|
"loss": 0.337, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 42.5, |
|
"learning_rate": 3.194444444444444e-05, |
|
"loss": 0.3093, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 42.67, |
|
"learning_rate": 3.185185185185185e-05, |
|
"loss": 0.3306, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 42.83, |
|
"learning_rate": 3.175925925925926e-05, |
|
"loss": 0.3781, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 43.0, |
|
"learning_rate": 3.1666666666666666e-05, |
|
"loss": 0.3561, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 43.0, |
|
"eval_accuracy": 0.9076682316118936, |
|
"eval_f1": 0.9072816984688045, |
|
"eval_loss": 0.3090434968471527, |
|
"eval_roc_auc": 0.9804251537873032, |
|
"eval_runtime": 30.8687, |
|
"eval_samples_per_second": 62.102, |
|
"eval_steps_per_second": 1.944, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 43.17, |
|
"learning_rate": 3.157407407407408e-05, |
|
"loss": 0.3419, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 43.33, |
|
"learning_rate": 3.148148148148148e-05, |
|
"loss": 0.3213, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 43.5, |
|
"learning_rate": 3.138888888888889e-05, |
|
"loss": 0.3134, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 43.67, |
|
"learning_rate": 3.1296296296296295e-05, |
|
"loss": 0.3419, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 43.83, |
|
"learning_rate": 3.1203703703703706e-05, |
|
"loss": 0.282, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"learning_rate": 3.111111111111111e-05, |
|
"loss": 0.3136, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"eval_accuracy": 0.9175795513823682, |
|
"eval_f1": 0.9143946754568497, |
|
"eval_loss": 0.32355380058288574, |
|
"eval_roc_auc": 0.9781668988388087, |
|
"eval_runtime": 31.0613, |
|
"eval_samples_per_second": 61.717, |
|
"eval_steps_per_second": 1.932, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 44.17, |
|
"learning_rate": 3.101851851851852e-05, |
|
"loss": 0.3187, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 44.33, |
|
"learning_rate": 3.0925925925925924e-05, |
|
"loss": 0.3254, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 44.5, |
|
"learning_rate": 3.0833333333333335e-05, |
|
"loss": 0.292, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 44.67, |
|
"learning_rate": 3.074074074074074e-05, |
|
"loss": 0.3079, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 44.83, |
|
"learning_rate": 3.064814814814815e-05, |
|
"loss": 0.2808, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 45.0, |
|
"learning_rate": 3.055555555555556e-05, |
|
"loss": 0.3529, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 45.0, |
|
"eval_accuracy": 0.92018779342723, |
|
"eval_f1": 0.9174792311793412, |
|
"eval_loss": 0.30541861057281494, |
|
"eval_roc_auc": 0.9800384924019299, |
|
"eval_runtime": 31.1166, |
|
"eval_samples_per_second": 61.607, |
|
"eval_steps_per_second": 1.928, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 45.17, |
|
"learning_rate": 3.0462962962962964e-05, |
|
"loss": 0.353, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 45.33, |
|
"learning_rate": 3.037037037037037e-05, |
|
"loss": 0.3488, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 45.5, |
|
"learning_rate": 3.0277777777777776e-05, |
|
"loss": 0.2973, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 45.67, |
|
"learning_rate": 3.018518518518519e-05, |
|
"loss": 0.325, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 45.83, |
|
"learning_rate": 3.0092592592592593e-05, |
|
"loss": 0.3183, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 46.0, |
|
"learning_rate": 3e-05, |
|
"loss": 0.2987, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 46.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.9221803007650173, |
|
"eval_loss": 0.294436514377594, |
|
"eval_roc_auc": 0.9801680285600346, |
|
"eval_runtime": 30.9755, |
|
"eval_samples_per_second": 61.888, |
|
"eval_steps_per_second": 1.937, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 46.17, |
|
"learning_rate": 2.9907407407407405e-05, |
|
"loss": 0.2837, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 46.33, |
|
"learning_rate": 2.981481481481482e-05, |
|
"loss": 0.3087, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 46.5, |
|
"learning_rate": 2.9722222222222223e-05, |
|
"loss": 0.2876, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 46.67, |
|
"learning_rate": 2.962962962962963e-05, |
|
"loss": 0.3132, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 46.83, |
|
"learning_rate": 2.953703703703704e-05, |
|
"loss": 0.2676, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 47.0, |
|
"learning_rate": 2.9444444444444448e-05, |
|
"loss": 0.2966, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 47.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.920079923314828, |
|
"eval_loss": 0.3215111196041107, |
|
"eval_roc_auc": 0.9785648638154409, |
|
"eval_runtime": 31.0099, |
|
"eval_samples_per_second": 61.819, |
|
"eval_steps_per_second": 1.935, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 47.17, |
|
"learning_rate": 2.9351851851851852e-05, |
|
"loss": 0.297, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 47.33, |
|
"learning_rate": 2.925925925925926e-05, |
|
"loss": 0.2923, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 47.5, |
|
"learning_rate": 2.916666666666667e-05, |
|
"loss": 0.2761, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 47.67, |
|
"learning_rate": 2.9074074074074077e-05, |
|
"loss": 0.3353, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 47.83, |
|
"learning_rate": 2.898148148148148e-05, |
|
"loss": 0.3118, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"learning_rate": 2.8888888888888888e-05, |
|
"loss": 0.3203, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"eval_accuracy": 0.9243609806990088, |
|
"eval_f1": 0.9218866596198343, |
|
"eval_loss": 0.31495025753974915, |
|
"eval_roc_auc": 0.9797406270362136, |
|
"eval_runtime": 30.9782, |
|
"eval_samples_per_second": 61.882, |
|
"eval_steps_per_second": 1.937, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 48.17, |
|
"learning_rate": 2.87962962962963e-05, |
|
"loss": 0.2725, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 48.33, |
|
"learning_rate": 2.8703703703703706e-05, |
|
"loss": 0.3288, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 48.5, |
|
"learning_rate": 2.861111111111111e-05, |
|
"loss": 0.2848, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 48.67, |
|
"learning_rate": 2.851851851851852e-05, |
|
"loss": 0.2902, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 48.83, |
|
"learning_rate": 2.8425925925925928e-05, |
|
"loss": 0.3362, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 49.0, |
|
"learning_rate": 2.8333333333333335e-05, |
|
"loss": 0.2821, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 49.0, |
|
"eval_accuracy": 0.9290558163797601, |
|
"eval_f1": 0.9273079215291836, |
|
"eval_loss": 0.30717700719833374, |
|
"eval_roc_auc": 0.980032505249785, |
|
"eval_runtime": 31.1146, |
|
"eval_samples_per_second": 61.611, |
|
"eval_steps_per_second": 1.928, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 49.17, |
|
"learning_rate": 2.824074074074074e-05, |
|
"loss": 0.3273, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 49.33, |
|
"learning_rate": 2.814814814814815e-05, |
|
"loss": 0.3114, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 49.5, |
|
"learning_rate": 2.8055555555555557e-05, |
|
"loss": 0.3081, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 49.67, |
|
"learning_rate": 2.7962962962962965e-05, |
|
"loss": 0.281, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 49.83, |
|
"learning_rate": 2.7870370370370375e-05, |
|
"loss": 0.2993, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.2852, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"eval_accuracy": 0.9175795513823682, |
|
"eval_f1": 0.9154771942358105, |
|
"eval_loss": 0.32651108503341675, |
|
"eval_roc_auc": 0.9792309379704945, |
|
"eval_runtime": 31.0614, |
|
"eval_samples_per_second": 61.716, |
|
"eval_steps_per_second": 1.932, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 50.17, |
|
"learning_rate": 2.7685185185185186e-05, |
|
"loss": 0.2835, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 50.33, |
|
"learning_rate": 2.7592592592592594e-05, |
|
"loss": 0.315, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 50.5, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 0.3096, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 50.67, |
|
"learning_rate": 2.7407407407407408e-05, |
|
"loss": 0.3351, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 50.83, |
|
"learning_rate": 2.7314814814814816e-05, |
|
"loss": 0.3037, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 51.0, |
|
"learning_rate": 2.7222222222222223e-05, |
|
"loss": 0.3544, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 51.0, |
|
"eval_accuracy": 0.9149713093375065, |
|
"eval_f1": 0.9149966409724043, |
|
"eval_loss": 0.31751149892807007, |
|
"eval_roc_auc": 0.9801850383933074, |
|
"eval_runtime": 30.9664, |
|
"eval_samples_per_second": 61.906, |
|
"eval_steps_per_second": 1.938, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 51.17, |
|
"learning_rate": 2.7129629629629634e-05, |
|
"loss": 0.3055, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 51.33, |
|
"learning_rate": 2.7037037037037037e-05, |
|
"loss": 0.268, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 51.5, |
|
"learning_rate": 2.6944444444444445e-05, |
|
"loss": 0.3169, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 51.67, |
|
"learning_rate": 2.6851851851851855e-05, |
|
"loss": 0.3145, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 51.83, |
|
"learning_rate": 2.6759259259259263e-05, |
|
"loss": 0.2648, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.3327, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"eval_accuracy": 0.9243609806990088, |
|
"eval_f1": 0.9221864154492979, |
|
"eval_loss": 0.31344056129455566, |
|
"eval_roc_auc": 0.9802070926178522, |
|
"eval_runtime": 30.8335, |
|
"eval_samples_per_second": 62.173, |
|
"eval_steps_per_second": 1.946, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 52.17, |
|
"learning_rate": 2.6574074074074074e-05, |
|
"loss": 0.2923, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 52.33, |
|
"learning_rate": 2.6481481481481485e-05, |
|
"loss": 0.2827, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 52.5, |
|
"learning_rate": 2.6388888888888892e-05, |
|
"loss": 0.3051, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 52.67, |
|
"learning_rate": 2.6296296296296296e-05, |
|
"loss": 0.3418, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 52.83, |
|
"learning_rate": 2.6203703703703703e-05, |
|
"loss": 0.312, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 53.0, |
|
"learning_rate": 2.6111111111111114e-05, |
|
"loss": 0.2877, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 53.0, |
|
"eval_accuracy": 0.9165362545644236, |
|
"eval_f1": 0.9154266697091844, |
|
"eval_loss": 0.3222474157810211, |
|
"eval_roc_auc": 0.980546397557177, |
|
"eval_runtime": 31.0108, |
|
"eval_samples_per_second": 61.817, |
|
"eval_steps_per_second": 1.935, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 53.17, |
|
"learning_rate": 2.601851851851852e-05, |
|
"loss": 0.3011, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 53.33, |
|
"learning_rate": 2.5925925925925925e-05, |
|
"loss": 0.3116, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 53.5, |
|
"learning_rate": 2.5833333333333336e-05, |
|
"loss": 0.2903, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 53.67, |
|
"learning_rate": 2.5740740740740743e-05, |
|
"loss": 0.2932, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 53.83, |
|
"learning_rate": 2.564814814814815e-05, |
|
"loss": 0.2816, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 54.0, |
|
"learning_rate": 2.5555555555555554e-05, |
|
"loss": 0.3089, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 54.0, |
|
"eval_accuracy": 0.9259259259259259, |
|
"eval_f1": 0.924844063747744, |
|
"eval_loss": 0.30451875925064087, |
|
"eval_roc_auc": 0.9810896700594666, |
|
"eval_runtime": 30.7749, |
|
"eval_samples_per_second": 62.291, |
|
"eval_steps_per_second": 1.95, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 54.17, |
|
"learning_rate": 2.5462962962962965e-05, |
|
"loss": 0.2764, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 54.33, |
|
"learning_rate": 2.5370370370370372e-05, |
|
"loss": 0.2805, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 54.5, |
|
"learning_rate": 2.527777777777778e-05, |
|
"loss": 0.3094, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 54.67, |
|
"learning_rate": 2.5185185185185183e-05, |
|
"loss": 0.2699, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 54.83, |
|
"learning_rate": 2.5092592592592594e-05, |
|
"loss": 0.2787, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 55.0, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.2904, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 55.0, |
|
"eval_accuracy": 0.918622848200313, |
|
"eval_f1": 0.9174621980217676, |
|
"eval_loss": 0.3300776183605194, |
|
"eval_roc_auc": 0.9786791637818757, |
|
"eval_runtime": 31.1955, |
|
"eval_samples_per_second": 61.451, |
|
"eval_steps_per_second": 1.923, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 55.17, |
|
"learning_rate": 2.490740740740741e-05, |
|
"loss": 0.2824, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 55.33, |
|
"learning_rate": 2.4814814814814816e-05, |
|
"loss": 0.2755, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 55.5, |
|
"learning_rate": 2.4722222222222223e-05, |
|
"loss": 0.3083, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 55.67, |
|
"learning_rate": 2.462962962962963e-05, |
|
"loss": 0.2926, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 55.83, |
|
"learning_rate": 2.4537037037037038e-05, |
|
"loss": 0.3447, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 0.2821, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"eval_accuracy": 0.9217527386541471, |
|
"eval_f1": 0.9206002214325367, |
|
"eval_loss": 0.30689331889152527, |
|
"eval_roc_auc": 0.9809813323357223, |
|
"eval_runtime": 30.8415, |
|
"eval_samples_per_second": 62.157, |
|
"eval_steps_per_second": 1.945, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 56.17, |
|
"learning_rate": 2.4351851851851852e-05, |
|
"loss": 0.3106, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 56.33, |
|
"learning_rate": 2.425925925925926e-05, |
|
"loss": 0.2783, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 56.5, |
|
"learning_rate": 2.4166666666666667e-05, |
|
"loss": 0.2781, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 56.67, |
|
"learning_rate": 2.4074074074074074e-05, |
|
"loss": 0.2751, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 56.83, |
|
"learning_rate": 2.398148148148148e-05, |
|
"loss": 0.2861, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 57.0, |
|
"learning_rate": 2.3888888888888892e-05, |
|
"loss": 0.321, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 57.0, |
|
"eval_accuracy": 0.9269692227438706, |
|
"eval_f1": 0.9254350978490243, |
|
"eval_loss": 0.32090768218040466, |
|
"eval_roc_auc": 0.980025373202588, |
|
"eval_runtime": 30.8398, |
|
"eval_samples_per_second": 62.16, |
|
"eval_steps_per_second": 1.946, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 57.17, |
|
"learning_rate": 2.3796296296296296e-05, |
|
"loss": 0.3079, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 57.33, |
|
"learning_rate": 2.3703703703703707e-05, |
|
"loss": 0.3239, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 57.5, |
|
"learning_rate": 2.361111111111111e-05, |
|
"loss": 0.2967, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 57.67, |
|
"learning_rate": 2.351851851851852e-05, |
|
"loss": 0.2888, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 57.83, |
|
"learning_rate": 2.3425925925925925e-05, |
|
"loss": 0.2965, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 58.0, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 0.2995, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 58.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.9202460923868973, |
|
"eval_loss": 0.32812780141830444, |
|
"eval_roc_auc": 0.9802116570487911, |
|
"eval_runtime": 30.7892, |
|
"eval_samples_per_second": 62.262, |
|
"eval_steps_per_second": 1.949, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 58.17, |
|
"learning_rate": 2.324074074074074e-05, |
|
"loss": 0.2897, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 58.33, |
|
"learning_rate": 2.314814814814815e-05, |
|
"loss": 0.279, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 58.5, |
|
"learning_rate": 2.3055555555555558e-05, |
|
"loss": 0.3187, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 58.67, |
|
"learning_rate": 2.2962962962962965e-05, |
|
"loss": 0.2889, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 58.83, |
|
"learning_rate": 2.2870370370370372e-05, |
|
"loss": 0.3007, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 59.0, |
|
"learning_rate": 2.277777777777778e-05, |
|
"loss": 0.2683, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 59.0, |
|
"eval_accuracy": 0.92018779342723, |
|
"eval_f1": 0.9173963500365258, |
|
"eval_loss": 0.3262675702571869, |
|
"eval_roc_auc": 0.9801554792004114, |
|
"eval_runtime": 30.9822, |
|
"eval_samples_per_second": 61.874, |
|
"eval_steps_per_second": 1.937, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 59.17, |
|
"learning_rate": 2.2685185185185187e-05, |
|
"loss": 0.2798, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 59.33, |
|
"learning_rate": 2.2592592592592594e-05, |
|
"loss": 0.2968, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 59.5, |
|
"learning_rate": 2.25e-05, |
|
"loss": 0.293, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 59.67, |
|
"learning_rate": 2.240740740740741e-05, |
|
"loss": 0.3071, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 59.83, |
|
"learning_rate": 2.2314814814814816e-05, |
|
"loss": 0.275, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.3021, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"eval_accuracy": 0.918622848200313, |
|
"eval_f1": 0.9170405607415676, |
|
"eval_loss": 0.3483915627002716, |
|
"eval_roc_auc": 0.9788368798211406, |
|
"eval_runtime": 30.8968, |
|
"eval_samples_per_second": 62.045, |
|
"eval_steps_per_second": 1.942, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 60.17, |
|
"learning_rate": 2.212962962962963e-05, |
|
"loss": 0.279, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 60.33, |
|
"learning_rate": 2.2037037037037038e-05, |
|
"loss": 0.2727, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 60.5, |
|
"learning_rate": 2.1944444444444445e-05, |
|
"loss": 0.2738, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 60.67, |
|
"learning_rate": 2.1851851851851852e-05, |
|
"loss": 0.2756, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 60.83, |
|
"learning_rate": 2.175925925925926e-05, |
|
"loss": 0.2846, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 61.0, |
|
"learning_rate": 2.1666666666666667e-05, |
|
"loss": 0.3262, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 61.0, |
|
"eval_accuracy": 0.9165362545644236, |
|
"eval_f1": 0.9150909158754031, |
|
"eval_loss": 0.327029287815094, |
|
"eval_roc_auc": 0.98071548830533, |
|
"eval_runtime": 30.9058, |
|
"eval_samples_per_second": 62.027, |
|
"eval_steps_per_second": 1.941, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 61.17, |
|
"learning_rate": 2.1574074074074074e-05, |
|
"loss": 0.281, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 61.33, |
|
"learning_rate": 2.148148148148148e-05, |
|
"loss": 0.2772, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 61.5, |
|
"learning_rate": 2.138888888888889e-05, |
|
"loss": 0.2932, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 61.67, |
|
"learning_rate": 2.1296296296296296e-05, |
|
"loss": 0.3086, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 61.83, |
|
"learning_rate": 2.1203703703703703e-05, |
|
"loss": 0.3077, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 62.0, |
|
"learning_rate": 2.111111111111111e-05, |
|
"loss": 0.2329, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 62.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.9210533466274746, |
|
"eval_loss": 0.3280071020126343, |
|
"eval_roc_auc": 0.9806818031844223, |
|
"eval_runtime": 30.8772, |
|
"eval_samples_per_second": 62.085, |
|
"eval_steps_per_second": 1.943, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 62.17, |
|
"learning_rate": 2.101851851851852e-05, |
|
"loss": 0.2588, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 62.33, |
|
"learning_rate": 2.0925925925925925e-05, |
|
"loss": 0.2897, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 62.5, |
|
"learning_rate": 2.0833333333333336e-05, |
|
"loss": 0.2828, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 62.67, |
|
"learning_rate": 2.074074074074074e-05, |
|
"loss": 0.3133, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 62.83, |
|
"learning_rate": 2.064814814814815e-05, |
|
"loss": 0.2802, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 63.0, |
|
"learning_rate": 2.0555555555555555e-05, |
|
"loss": 0.2935, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 63.0, |
|
"eval_accuracy": 0.9264475743348983, |
|
"eval_f1": 0.9243945598307481, |
|
"eval_loss": 0.32962048053741455, |
|
"eval_roc_auc": 0.9806711916360894, |
|
"eval_runtime": 30.8281, |
|
"eval_samples_per_second": 62.183, |
|
"eval_steps_per_second": 1.946, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 63.17, |
|
"learning_rate": 2.0462962962962965e-05, |
|
"loss": 0.2488, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 63.33, |
|
"learning_rate": 2.037037037037037e-05, |
|
"loss": 0.2998, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 63.5, |
|
"learning_rate": 2.027777777777778e-05, |
|
"loss": 0.2742, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 63.67, |
|
"learning_rate": 2.0185185185185187e-05, |
|
"loss": 0.2898, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 63.83, |
|
"learning_rate": 2.0092592592592594e-05, |
|
"loss": 0.2845, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"learning_rate": 2e-05, |
|
"loss": 0.2856, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"eval_accuracy": 0.9217527386541471, |
|
"eval_f1": 0.9209061464765306, |
|
"eval_loss": 0.33229467272758484, |
|
"eval_roc_auc": 0.9810524271999177, |
|
"eval_runtime": 31.0544, |
|
"eval_samples_per_second": 61.73, |
|
"eval_steps_per_second": 1.932, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 64.17, |
|
"learning_rate": 1.990740740740741e-05, |
|
"loss": 0.2797, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 64.33, |
|
"learning_rate": 1.9814814814814816e-05, |
|
"loss": 0.2596, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 64.5, |
|
"learning_rate": 1.9722222222222224e-05, |
|
"loss": 0.2541, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 64.67, |
|
"learning_rate": 1.962962962962963e-05, |
|
"loss": 0.295, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 64.83, |
|
"learning_rate": 1.9537037037037038e-05, |
|
"loss": 0.2947, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 65.0, |
|
"learning_rate": 1.9444444444444445e-05, |
|
"loss": 0.2829, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 65.0, |
|
"eval_accuracy": 0.9217527386541471, |
|
"eval_f1": 0.9200417695607408, |
|
"eval_loss": 0.3389854431152344, |
|
"eval_roc_auc": 0.9802066669139512, |
|
"eval_runtime": 30.8727, |
|
"eval_samples_per_second": 62.094, |
|
"eval_steps_per_second": 1.943, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 65.17, |
|
"learning_rate": 1.9351851851851853e-05, |
|
"loss": 0.2998, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 65.33, |
|
"learning_rate": 1.925925925925926e-05, |
|
"loss": 0.285, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 65.5, |
|
"learning_rate": 1.9166666666666667e-05, |
|
"loss": 0.2674, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 65.67, |
|
"learning_rate": 1.9074074074074075e-05, |
|
"loss": 0.2454, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 65.83, |
|
"learning_rate": 1.8981481481481482e-05, |
|
"loss": 0.2881, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 66.0, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 0.3044, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 66.0, |
|
"eval_accuracy": 0.9227960354720918, |
|
"eval_f1": 0.9215159958118743, |
|
"eval_loss": 0.3324008285999298, |
|
"eval_roc_auc": 0.9799450739817069, |
|
"eval_runtime": 30.9618, |
|
"eval_samples_per_second": 61.915, |
|
"eval_steps_per_second": 1.938, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 66.17, |
|
"learning_rate": 1.8796296296296296e-05, |
|
"loss": 0.2662, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 66.33, |
|
"learning_rate": 1.8703703703703704e-05, |
|
"loss": 0.3053, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 66.5, |
|
"learning_rate": 1.861111111111111e-05, |
|
"loss": 0.3118, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 66.67, |
|
"learning_rate": 1.8518518518518518e-05, |
|
"loss": 0.2615, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 66.83, |
|
"learning_rate": 1.8425925925925926e-05, |
|
"loss": 0.2664, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 67.0, |
|
"learning_rate": 1.8333333333333333e-05, |
|
"loss": 0.2767, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 67.0, |
|
"eval_accuracy": 0.9160146061554513, |
|
"eval_f1": 0.9149949948229915, |
|
"eval_loss": 0.34964630007743835, |
|
"eval_roc_auc": 0.9777649341860628, |
|
"eval_runtime": 31.1987, |
|
"eval_samples_per_second": 61.445, |
|
"eval_steps_per_second": 1.923, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 67.17, |
|
"learning_rate": 1.824074074074074e-05, |
|
"loss": 0.3021, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 67.33, |
|
"learning_rate": 1.814814814814815e-05, |
|
"loss": 0.307, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 67.5, |
|
"learning_rate": 1.8055555555555555e-05, |
|
"loss": 0.2269, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 67.67, |
|
"learning_rate": 1.7962962962962965e-05, |
|
"loss": 0.2514, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 67.83, |
|
"learning_rate": 1.787037037037037e-05, |
|
"loss": 0.2634, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"learning_rate": 1.777777777777778e-05, |
|
"loss": 0.2936, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"eval_accuracy": 0.927490871152843, |
|
"eval_f1": 0.9257053426626348, |
|
"eval_loss": 0.3377942442893982, |
|
"eval_roc_auc": 0.9790075894232677, |
|
"eval_runtime": 30.9839, |
|
"eval_samples_per_second": 61.871, |
|
"eval_steps_per_second": 1.936, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 68.17, |
|
"learning_rate": 1.7685185185185184e-05, |
|
"loss": 0.2498, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 68.33, |
|
"learning_rate": 1.7592592592592595e-05, |
|
"loss": 0.2849, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 68.5, |
|
"learning_rate": 1.75e-05, |
|
"loss": 0.2601, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 68.67, |
|
"learning_rate": 1.740740740740741e-05, |
|
"loss": 0.2792, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 68.83, |
|
"learning_rate": 1.7314814814814813e-05, |
|
"loss": 0.318, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 69.0, |
|
"learning_rate": 1.7222222222222224e-05, |
|
"loss": 0.2884, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 69.0, |
|
"eval_accuracy": 0.9248826291079812, |
|
"eval_f1": 0.9226785544767538, |
|
"eval_loss": 0.34928712248802185, |
|
"eval_roc_auc": 0.9789693127399084, |
|
"eval_runtime": 30.9173, |
|
"eval_samples_per_second": 62.004, |
|
"eval_steps_per_second": 1.941, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 69.17, |
|
"learning_rate": 1.712962962962963e-05, |
|
"loss": 0.2657, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 69.33, |
|
"learning_rate": 1.7037037037037038e-05, |
|
"loss": 0.2904, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 69.5, |
|
"learning_rate": 1.6944444444444446e-05, |
|
"loss": 0.2975, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 69.67, |
|
"learning_rate": 1.6851851851851853e-05, |
|
"loss": 0.2501, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 69.83, |
|
"learning_rate": 1.675925925925926e-05, |
|
"loss": 0.2845, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.2906, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"eval_accuracy": 0.927490871152843, |
|
"eval_f1": 0.9259332994297357, |
|
"eval_loss": 0.3407656252384186, |
|
"eval_roc_auc": 0.9793668129056554, |
|
"eval_runtime": 31.0324, |
|
"eval_samples_per_second": 61.774, |
|
"eval_steps_per_second": 1.933, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 70.17, |
|
"learning_rate": 1.6574074074074075e-05, |
|
"loss": 0.2927, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 70.33, |
|
"learning_rate": 1.6481481481481482e-05, |
|
"loss": 0.2689, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 70.5, |
|
"learning_rate": 1.638888888888889e-05, |
|
"loss": 0.2869, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 70.67, |
|
"learning_rate": 1.62962962962963e-05, |
|
"loss": 0.2907, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 70.83, |
|
"learning_rate": 1.6203703703703704e-05, |
|
"loss": 0.3168, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 71.0, |
|
"learning_rate": 1.6111111111111115e-05, |
|
"loss": 0.2542, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 71.0, |
|
"eval_accuracy": 0.9248826291079812, |
|
"eval_f1": 0.9233405493627096, |
|
"eval_loss": 0.35591500997543335, |
|
"eval_roc_auc": 0.9769011325028507, |
|
"eval_runtime": 30.9651, |
|
"eval_samples_per_second": 61.908, |
|
"eval_steps_per_second": 1.938, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 71.17, |
|
"learning_rate": 1.601851851851852e-05, |
|
"loss": 0.2411, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 71.33, |
|
"learning_rate": 1.5925925925925926e-05, |
|
"loss": 0.2807, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 71.5, |
|
"learning_rate": 1.5833333333333333e-05, |
|
"loss": 0.3036, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 71.67, |
|
"learning_rate": 1.574074074074074e-05, |
|
"loss": 0.2893, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 71.83, |
|
"learning_rate": 1.5648148148148148e-05, |
|
"loss": 0.2895, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 0.2557, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"eval_accuracy": 0.9254042775169535, |
|
"eval_f1": 0.9237261497476135, |
|
"eval_loss": 0.3480711877346039, |
|
"eval_roc_auc": 0.9779474249307349, |
|
"eval_runtime": 30.8763, |
|
"eval_samples_per_second": 62.086, |
|
"eval_steps_per_second": 1.943, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 72.17, |
|
"learning_rate": 1.5462962962962962e-05, |
|
"loss": 0.3108, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 72.33, |
|
"learning_rate": 1.537037037037037e-05, |
|
"loss": 0.294, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 72.5, |
|
"learning_rate": 1.527777777777778e-05, |
|
"loss": 0.2559, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 72.67, |
|
"learning_rate": 1.5185185185185186e-05, |
|
"loss": 0.2842, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 72.83, |
|
"learning_rate": 1.5092592592592595e-05, |
|
"loss": 0.2542, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 73.0, |
|
"learning_rate": 1.5e-05, |
|
"loss": 0.2266, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 73.0, |
|
"eval_accuracy": 0.9222743870631195, |
|
"eval_f1": 0.9208402796789302, |
|
"eval_loss": 0.35181114077568054, |
|
"eval_roc_auc": 0.9780920215986283, |
|
"eval_runtime": 31.1669, |
|
"eval_samples_per_second": 61.507, |
|
"eval_steps_per_second": 1.925, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 73.17, |
|
"learning_rate": 1.490740740740741e-05, |
|
"loss": 0.2967, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 73.33, |
|
"learning_rate": 1.4814814814814815e-05, |
|
"loss": 0.2604, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 73.5, |
|
"learning_rate": 1.4722222222222224e-05, |
|
"loss": 0.2861, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 73.67, |
|
"learning_rate": 1.462962962962963e-05, |
|
"loss": 0.2847, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 73.83, |
|
"learning_rate": 1.4537037037037039e-05, |
|
"loss": 0.2558, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 74.0, |
|
"learning_rate": 1.4444444444444444e-05, |
|
"loss": 0.2771, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 74.0, |
|
"eval_accuracy": 0.9254042775169535, |
|
"eval_f1": 0.9230920653274585, |
|
"eval_loss": 0.35441046953201294, |
|
"eval_roc_auc": 0.9775736826942986, |
|
"eval_runtime": 31.0663, |
|
"eval_samples_per_second": 61.707, |
|
"eval_steps_per_second": 1.931, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 74.17, |
|
"learning_rate": 1.4351851851851853e-05, |
|
"loss": 0.3149, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 74.33, |
|
"learning_rate": 1.425925925925926e-05, |
|
"loss": 0.2482, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 74.5, |
|
"learning_rate": 1.4166666666666668e-05, |
|
"loss": 0.2868, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 74.67, |
|
"learning_rate": 1.4074074074074075e-05, |
|
"loss": 0.2285, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 74.83, |
|
"learning_rate": 1.3981481481481482e-05, |
|
"loss": 0.2915, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"learning_rate": 1.388888888888889e-05, |
|
"loss": 0.2747, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"eval_accuracy": 0.9285341679707877, |
|
"eval_f1": 0.9269627662435742, |
|
"eval_loss": 0.34690794348716736, |
|
"eval_roc_auc": 0.9780339750030952, |
|
"eval_runtime": 30.8369, |
|
"eval_samples_per_second": 62.166, |
|
"eval_steps_per_second": 1.946, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 75.17, |
|
"learning_rate": 1.3796296296296297e-05, |
|
"loss": 0.3009, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 75.33, |
|
"learning_rate": 1.3703703703703704e-05, |
|
"loss": 0.3021, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 75.5, |
|
"learning_rate": 1.3611111111111111e-05, |
|
"loss": 0.2934, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 75.67, |
|
"learning_rate": 1.3518518518518519e-05, |
|
"loss": 0.2476, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 75.83, |
|
"learning_rate": 1.3425925925925928e-05, |
|
"loss": 0.2232, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.2443, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.9216258234683066, |
|
"eval_loss": 0.3513363301753998, |
|
"eval_roc_auc": 0.9767332205772213, |
|
"eval_runtime": 31.1882, |
|
"eval_samples_per_second": 61.466, |
|
"eval_steps_per_second": 1.924, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 76.17, |
|
"learning_rate": 1.3240740740740742e-05, |
|
"loss": 0.2741, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 76.33, |
|
"learning_rate": 1.3148148148148148e-05, |
|
"loss": 0.2708, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 76.5, |
|
"learning_rate": 1.3055555555555557e-05, |
|
"loss": 0.2803, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 76.67, |
|
"learning_rate": 1.2962962962962962e-05, |
|
"loss": 0.2612, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 76.83, |
|
"learning_rate": 1.2870370370370371e-05, |
|
"loss": 0.277, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 77.0, |
|
"learning_rate": 1.2777777777777777e-05, |
|
"loss": 0.2859, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 77.0, |
|
"eval_accuracy": 0.9254042775169535, |
|
"eval_f1": 0.9234239757485955, |
|
"eval_loss": 0.3456012010574341, |
|
"eval_roc_auc": 0.9771096750183106, |
|
"eval_runtime": 31.0209, |
|
"eval_samples_per_second": 61.797, |
|
"eval_steps_per_second": 1.934, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 77.17, |
|
"learning_rate": 1.2685185185185186e-05, |
|
"loss": 0.2851, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 77.33, |
|
"learning_rate": 1.2592592592592592e-05, |
|
"loss": 0.2572, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 77.5, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.2639, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 77.67, |
|
"learning_rate": 1.2407407407407408e-05, |
|
"loss": 0.2768, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 77.83, |
|
"learning_rate": 1.2314814814814815e-05, |
|
"loss": 0.3115, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 78.0, |
|
"learning_rate": 1.2222222222222222e-05, |
|
"loss": 0.2677, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 78.0, |
|
"eval_accuracy": 0.9254042775169535, |
|
"eval_f1": 0.9238967953597521, |
|
"eval_loss": 0.34742477536201477, |
|
"eval_roc_auc": 0.9780192714539518, |
|
"eval_runtime": 30.9011, |
|
"eval_samples_per_second": 62.037, |
|
"eval_steps_per_second": 1.942, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 78.17, |
|
"learning_rate": 1.212962962962963e-05, |
|
"loss": 0.3025, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 78.33, |
|
"learning_rate": 1.2037037037037037e-05, |
|
"loss": 0.3503, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 78.5, |
|
"learning_rate": 1.1944444444444446e-05, |
|
"loss": 0.2569, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 78.67, |
|
"learning_rate": 1.1851851851851853e-05, |
|
"loss": 0.2529, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 78.83, |
|
"learning_rate": 1.175925925925926e-05, |
|
"loss": 0.2804, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 79.0, |
|
"learning_rate": 1.1666666666666668e-05, |
|
"loss": 0.2492, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 79.0, |
|
"eval_accuracy": 0.9254042775169535, |
|
"eval_f1": 0.9234698228772668, |
|
"eval_loss": 0.35134339332580566, |
|
"eval_roc_auc": 0.9777977841330655, |
|
"eval_runtime": 30.9883, |
|
"eval_samples_per_second": 61.862, |
|
"eval_steps_per_second": 1.936, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 79.17, |
|
"learning_rate": 1.1574074074074075e-05, |
|
"loss": 0.265, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 79.33, |
|
"learning_rate": 1.1481481481481482e-05, |
|
"loss": 0.2664, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 79.5, |
|
"learning_rate": 1.138888888888889e-05, |
|
"loss": 0.2669, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 79.67, |
|
"learning_rate": 1.1296296296296297e-05, |
|
"loss": 0.2859, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 79.83, |
|
"learning_rate": 1.1203703703703704e-05, |
|
"loss": 0.2518, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.2532, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"eval_accuracy": 0.9233176838810642, |
|
"eval_f1": 0.9210239732637377, |
|
"eval_loss": 0.3524177074432373, |
|
"eval_roc_auc": 0.9773028713209709, |
|
"eval_runtime": 30.9122, |
|
"eval_samples_per_second": 62.014, |
|
"eval_steps_per_second": 1.941, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 80.17, |
|
"learning_rate": 1.1018518518518519e-05, |
|
"loss": 0.2829, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 80.33, |
|
"learning_rate": 1.0925925925925926e-05, |
|
"loss": 0.2988, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 80.5, |
|
"learning_rate": 1.0833333333333334e-05, |
|
"loss": 0.2675, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 80.67, |
|
"learning_rate": 1.074074074074074e-05, |
|
"loss": 0.232, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 80.83, |
|
"learning_rate": 1.0648148148148148e-05, |
|
"loss": 0.3254, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 81.0, |
|
"learning_rate": 1.0555555555555555e-05, |
|
"loss": 0.2646, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 81.0, |
|
"eval_accuracy": 0.9238393322900366, |
|
"eval_f1": 0.9239580070656279, |
|
"eval_loss": 0.35290443897247314, |
|
"eval_roc_auc": 0.9783679016873998, |
|
"eval_runtime": 30.9829, |
|
"eval_samples_per_second": 61.873, |
|
"eval_steps_per_second": 1.937, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 81.17, |
|
"learning_rate": 1.0462962962962963e-05, |
|
"loss": 0.2492, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 81.33, |
|
"learning_rate": 1.037037037037037e-05, |
|
"loss": 0.2775, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 81.5, |
|
"learning_rate": 1.0277777777777777e-05, |
|
"loss": 0.2573, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 81.67, |
|
"learning_rate": 1.0185185185185185e-05, |
|
"loss": 0.2504, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 81.83, |
|
"learning_rate": 1.0092592592592594e-05, |
|
"loss": 0.31, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 82.0, |
|
"learning_rate": 1e-05, |
|
"loss": 0.2842, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 82.0, |
|
"eval_accuracy": 0.9280125195618153, |
|
"eval_f1": 0.9260333195816027, |
|
"eval_loss": 0.34334856271743774, |
|
"eval_roc_auc": 0.977665008318065, |
|
"eval_runtime": 31.0483, |
|
"eval_samples_per_second": 61.742, |
|
"eval_steps_per_second": 1.932, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 82.17, |
|
"learning_rate": 9.907407407407408e-06, |
|
"loss": 0.2765, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 82.33, |
|
"learning_rate": 9.814814814814815e-06, |
|
"loss": 0.3014, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 82.5, |
|
"learning_rate": 9.722222222222223e-06, |
|
"loss": 0.2825, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 82.67, |
|
"learning_rate": 9.62962962962963e-06, |
|
"loss": 0.2874, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 82.83, |
|
"learning_rate": 9.537037037037037e-06, |
|
"loss": 0.2796, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 83.0, |
|
"learning_rate": 9.444444444444445e-06, |
|
"loss": 0.2872, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 83.0, |
|
"eval_accuracy": 0.9285341679707877, |
|
"eval_f1": 0.9271958003115927, |
|
"eval_loss": 0.35838404297828674, |
|
"eval_roc_auc": 0.9770701360759796, |
|
"eval_runtime": 31.004, |
|
"eval_samples_per_second": 61.831, |
|
"eval_steps_per_second": 1.935, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 83.17, |
|
"learning_rate": 9.351851851851852e-06, |
|
"loss": 0.2552, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 83.33, |
|
"learning_rate": 9.259259259259259e-06, |
|
"loss": 0.2755, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 83.5, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 0.3196, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 83.67, |
|
"learning_rate": 9.074074074074075e-06, |
|
"loss": 0.2738, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 83.83, |
|
"learning_rate": 8.981481481481483e-06, |
|
"loss": 0.2764, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"learning_rate": 8.88888888888889e-06, |
|
"loss": 0.2678, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"eval_accuracy": 0.9316640584246219, |
|
"eval_f1": 0.9297928229609359, |
|
"eval_loss": 0.3429672122001648, |
|
"eval_roc_auc": 0.9777372174983401, |
|
"eval_runtime": 30.8771, |
|
"eval_samples_per_second": 62.085, |
|
"eval_steps_per_second": 1.943, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 84.17, |
|
"learning_rate": 8.796296296296297e-06, |
|
"loss": 0.2943, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 84.33, |
|
"learning_rate": 8.703703703703705e-06, |
|
"loss": 0.2491, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 84.5, |
|
"learning_rate": 8.611111111111112e-06, |
|
"loss": 0.2702, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 84.67, |
|
"learning_rate": 8.518518518518519e-06, |
|
"loss": 0.2818, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 84.83, |
|
"learning_rate": 8.425925925925926e-06, |
|
"loss": 0.2681, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 85.0, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.2705, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 85.0, |
|
"eval_accuracy": 0.9290558163797601, |
|
"eval_f1": 0.9268182369344703, |
|
"eval_loss": 0.3533850312232971, |
|
"eval_roc_auc": 0.9777106946227319, |
|
"eval_runtime": 30.756, |
|
"eval_samples_per_second": 62.329, |
|
"eval_steps_per_second": 1.951, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 85.17, |
|
"learning_rate": 8.240740740740741e-06, |
|
"loss": 0.2345, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 85.33, |
|
"learning_rate": 8.14814814814815e-06, |
|
"loss": 0.2591, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 85.5, |
|
"learning_rate": 8.055555555555557e-06, |
|
"loss": 0.2629, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 85.67, |
|
"learning_rate": 7.962962962962963e-06, |
|
"loss": 0.2509, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 85.83, |
|
"learning_rate": 7.87037037037037e-06, |
|
"loss": 0.2657, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 86.0, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 0.2605, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 86.0, |
|
"eval_accuracy": 0.9295774647887324, |
|
"eval_f1": 0.9271674219062209, |
|
"eval_loss": 0.35735803842544556, |
|
"eval_roc_auc": 0.9776619601955235, |
|
"eval_runtime": 31.1184, |
|
"eval_samples_per_second": 61.603, |
|
"eval_steps_per_second": 1.928, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 86.17, |
|
"learning_rate": 7.685185185185185e-06, |
|
"loss": 0.274, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 86.33, |
|
"learning_rate": 7.592592592592593e-06, |
|
"loss": 0.2786, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 86.5, |
|
"learning_rate": 7.5e-06, |
|
"loss": 0.2748, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 86.67, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 0.2589, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 86.83, |
|
"learning_rate": 7.314814814814815e-06, |
|
"loss": 0.2675, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 87.0, |
|
"learning_rate": 7.222222222222222e-06, |
|
"loss": 0.2572, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 87.0, |
|
"eval_accuracy": 0.9290558163797601, |
|
"eval_f1": 0.927251726970012, |
|
"eval_loss": 0.34258973598480225, |
|
"eval_roc_auc": 0.9780606091514321, |
|
"eval_runtime": 31.1961, |
|
"eval_samples_per_second": 61.45, |
|
"eval_steps_per_second": 1.923, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 87.17, |
|
"learning_rate": 7.12962962962963e-06, |
|
"loss": 0.2674, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 87.33, |
|
"learning_rate": 7.0370370370370375e-06, |
|
"loss": 0.2552, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 87.5, |
|
"learning_rate": 6.944444444444445e-06, |
|
"loss": 0.2634, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 87.67, |
|
"learning_rate": 6.851851851851852e-06, |
|
"loss": 0.2737, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 87.83, |
|
"learning_rate": 6.759259259259259e-06, |
|
"loss": 0.2788, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.2646, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"eval_accuracy": 0.9243609806990088, |
|
"eval_f1": 0.9233615308327541, |
|
"eval_loss": 0.3472360372543335, |
|
"eval_roc_auc": 0.978910913704422, |
|
"eval_runtime": 30.9382, |
|
"eval_samples_per_second": 61.962, |
|
"eval_steps_per_second": 1.939, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 88.17, |
|
"learning_rate": 6.574074074074074e-06, |
|
"loss": 0.2603, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 88.33, |
|
"learning_rate": 6.481481481481481e-06, |
|
"loss": 0.2721, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 88.5, |
|
"learning_rate": 6.3888888888888885e-06, |
|
"loss": 0.2864, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 88.67, |
|
"learning_rate": 6.296296296296296e-06, |
|
"loss": 0.2517, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 88.83, |
|
"learning_rate": 6.203703703703704e-06, |
|
"loss": 0.2494, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 89.0, |
|
"learning_rate": 6.111111111111111e-06, |
|
"loss": 0.2831, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 89.0, |
|
"eval_accuracy": 0.9290558163797601, |
|
"eval_f1": 0.9272407593779144, |
|
"eval_loss": 0.34334689378738403, |
|
"eval_roc_auc": 0.9778616671938408, |
|
"eval_runtime": 30.9945, |
|
"eval_samples_per_second": 61.85, |
|
"eval_steps_per_second": 1.936, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 89.17, |
|
"learning_rate": 6.0185185185185185e-06, |
|
"loss": 0.2497, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 89.33, |
|
"learning_rate": 5.925925925925927e-06, |
|
"loss": 0.2433, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 89.5, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 0.2521, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 89.67, |
|
"learning_rate": 5.740740740740741e-06, |
|
"loss": 0.2343, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 89.83, |
|
"learning_rate": 5.6481481481481485e-06, |
|
"loss": 0.2693, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 0.277, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"eval_accuracy": 0.9280125195618153, |
|
"eval_f1": 0.9262715742423608, |
|
"eval_loss": 0.3440592885017395, |
|
"eval_roc_auc": 0.9788712016876208, |
|
"eval_runtime": 31.049, |
|
"eval_samples_per_second": 61.741, |
|
"eval_steps_per_second": 1.932, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 90.17, |
|
"learning_rate": 5.462962962962963e-06, |
|
"loss": 0.2516, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 90.33, |
|
"learning_rate": 5.37037037037037e-06, |
|
"loss": 0.278, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 90.5, |
|
"learning_rate": 5.277777777777778e-06, |
|
"loss": 0.2603, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 90.67, |
|
"learning_rate": 5.185185185185185e-06, |
|
"loss": 0.244, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 90.83, |
|
"learning_rate": 5.092592592592592e-06, |
|
"loss": 0.2609, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 91.0, |
|
"learning_rate": 5e-06, |
|
"loss": 0.2584, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 91.0, |
|
"eval_accuracy": 0.9248826291079812, |
|
"eval_f1": 0.9236426065868731, |
|
"eval_loss": 0.3431851267814636, |
|
"eval_roc_auc": 0.9787553845371705, |
|
"eval_runtime": 30.977, |
|
"eval_samples_per_second": 61.885, |
|
"eval_steps_per_second": 1.937, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 91.17, |
|
"learning_rate": 4.907407407407408e-06, |
|
"loss": 0.2829, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 91.33, |
|
"learning_rate": 4.814814814814815e-06, |
|
"loss": 0.2694, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 91.5, |
|
"learning_rate": 4.722222222222222e-06, |
|
"loss": 0.2226, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 91.67, |
|
"learning_rate": 4.6296296296296296e-06, |
|
"loss": 0.2856, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 91.83, |
|
"learning_rate": 4.537037037037038e-06, |
|
"loss": 0.2646, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"learning_rate": 4.444444444444445e-06, |
|
"loss": 0.2703, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"eval_accuracy": 0.9259259259259259, |
|
"eval_f1": 0.9247749239486609, |
|
"eval_loss": 0.3409390151500702, |
|
"eval_roc_auc": 0.9789125551817118, |
|
"eval_runtime": 31.0632, |
|
"eval_samples_per_second": 61.713, |
|
"eval_steps_per_second": 1.932, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 92.17, |
|
"learning_rate": 4.351851851851852e-06, |
|
"loss": 0.2429, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 92.33, |
|
"learning_rate": 4.2592592592592596e-06, |
|
"loss": 0.2819, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 92.5, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.2748, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 92.67, |
|
"learning_rate": 4.074074074074075e-06, |
|
"loss": 0.2526, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 92.83, |
|
"learning_rate": 3.9814814814814814e-06, |
|
"loss": 0.2468, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 93.0, |
|
"learning_rate": 3.888888888888889e-06, |
|
"loss": 0.2811, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 93.0, |
|
"eval_accuracy": 0.9227960354720918, |
|
"eval_f1": 0.9214672904997486, |
|
"eval_loss": 0.34487679600715637, |
|
"eval_roc_auc": 0.9795045467703533, |
|
"eval_runtime": 30.8936, |
|
"eval_samples_per_second": 62.052, |
|
"eval_steps_per_second": 1.942, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 93.17, |
|
"learning_rate": 3.7962962962962964e-06, |
|
"loss": 0.2401, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 93.33, |
|
"learning_rate": 3.7037037037037037e-06, |
|
"loss": 0.262, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 93.5, |
|
"learning_rate": 3.611111111111111e-06, |
|
"loss": 0.2586, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 93.67, |
|
"learning_rate": 3.5185185185185187e-06, |
|
"loss": 0.2708, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 93.83, |
|
"learning_rate": 3.425925925925926e-06, |
|
"loss": 0.2627, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 94.0, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.2786, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 94.0, |
|
"eval_accuracy": 0.9280125195618153, |
|
"eval_f1": 0.925961217058845, |
|
"eval_loss": 0.34653154015541077, |
|
"eval_roc_auc": 0.9788772928385914, |
|
"eval_runtime": 30.9745, |
|
"eval_samples_per_second": 61.89, |
|
"eval_steps_per_second": 1.937, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 94.17, |
|
"learning_rate": 3.2407407407407406e-06, |
|
"loss": 0.2449, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 94.33, |
|
"learning_rate": 3.148148148148148e-06, |
|
"loss": 0.2742, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 94.5, |
|
"learning_rate": 3.0555555555555556e-06, |
|
"loss": 0.2185, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 94.67, |
|
"learning_rate": 2.9629629629629633e-06, |
|
"loss": 0.2374, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 94.83, |
|
"learning_rate": 2.8703703703703706e-06, |
|
"loss": 0.2448, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"learning_rate": 2.777777777777778e-06, |
|
"loss": 0.267, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"eval_accuracy": 0.927490871152843, |
|
"eval_f1": 0.926020550825123, |
|
"eval_loss": 0.3472473919391632, |
|
"eval_roc_auc": 0.9791268419832962, |
|
"eval_runtime": 31.0494, |
|
"eval_samples_per_second": 61.74, |
|
"eval_steps_per_second": 1.932, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 95.17, |
|
"learning_rate": 2.685185185185185e-06, |
|
"loss": 0.2583, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 95.33, |
|
"learning_rate": 2.5925925925925925e-06, |
|
"loss": 0.2697, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 95.5, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.2727, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 95.67, |
|
"learning_rate": 2.4074074074074075e-06, |
|
"loss": 0.244, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 95.83, |
|
"learning_rate": 2.3148148148148148e-06, |
|
"loss": 0.2818, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"learning_rate": 2.2222222222222225e-06, |
|
"loss": 0.2695, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"eval_accuracy": 0.9285341679707877, |
|
"eval_f1": 0.926806533960345, |
|
"eval_loss": 0.34995850920677185, |
|
"eval_roc_auc": 0.9785719343176981, |
|
"eval_runtime": 30.9084, |
|
"eval_samples_per_second": 62.022, |
|
"eval_steps_per_second": 1.941, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 96.17, |
|
"learning_rate": 2.1296296296296298e-06, |
|
"loss": 0.2907, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 96.33, |
|
"learning_rate": 2.0370370370370375e-06, |
|
"loss": 0.2722, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 96.5, |
|
"learning_rate": 1.9444444444444444e-06, |
|
"loss": 0.2875, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 96.67, |
|
"learning_rate": 1.8518518518518519e-06, |
|
"loss": 0.2591, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 96.83, |
|
"learning_rate": 1.7592592592592594e-06, |
|
"loss": 0.2508, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 97.0, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 0.279, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 97.0, |
|
"eval_accuracy": 0.9269692227438706, |
|
"eval_f1": 0.9248843021081147, |
|
"eval_loss": 0.35818785429000854, |
|
"eval_roc_auc": 0.9782476583971591, |
|
"eval_runtime": 30.9227, |
|
"eval_samples_per_second": 61.993, |
|
"eval_steps_per_second": 1.94, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 97.17, |
|
"learning_rate": 1.574074074074074e-06, |
|
"loss": 0.2663, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 97.33, |
|
"learning_rate": 1.4814814814814817e-06, |
|
"loss": 0.2585, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 97.5, |
|
"learning_rate": 1.388888888888889e-06, |
|
"loss": 0.255, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 97.67, |
|
"learning_rate": 1.2962962962962962e-06, |
|
"loss": 0.2737, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 97.83, |
|
"learning_rate": 1.2037037037037037e-06, |
|
"loss": 0.271, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 98.0, |
|
"learning_rate": 1.1111111111111112e-06, |
|
"loss": 0.2774, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 98.0, |
|
"eval_accuracy": 0.9269692227438706, |
|
"eval_f1": 0.9250939587511013, |
|
"eval_loss": 0.3485565483570099, |
|
"eval_roc_auc": 0.9790459422116572, |
|
"eval_runtime": 31.1331, |
|
"eval_samples_per_second": 61.574, |
|
"eval_steps_per_second": 1.927, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 98.17, |
|
"learning_rate": 1.0185185185185188e-06, |
|
"loss": 0.2523, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 98.33, |
|
"learning_rate": 9.259259259259259e-07, |
|
"loss": 0.304, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 98.5, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 0.254, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 98.67, |
|
"learning_rate": 7.407407407407408e-07, |
|
"loss": 0.2096, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 98.83, |
|
"learning_rate": 6.481481481481481e-07, |
|
"loss": 0.2487, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 99.0, |
|
"learning_rate": 5.555555555555556e-07, |
|
"loss": 0.2512, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 99.0, |
|
"eval_accuracy": 0.9306207616066771, |
|
"eval_f1": 0.9287006146320661, |
|
"eval_loss": 0.3514097332954407, |
|
"eval_roc_auc": 0.9786490635988997, |
|
"eval_runtime": 30.7798, |
|
"eval_samples_per_second": 62.281, |
|
"eval_steps_per_second": 1.949, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 99.17, |
|
"learning_rate": 4.6296296296296297e-07, |
|
"loss": 0.2616, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 99.33, |
|
"learning_rate": 3.703703703703704e-07, |
|
"loss": 0.2575, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 99.5, |
|
"learning_rate": 2.777777777777778e-07, |
|
"loss": 0.2419, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 99.67, |
|
"learning_rate": 1.851851851851852e-07, |
|
"loss": 0.2778, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 99.83, |
|
"learning_rate": 9.25925925925926e-08, |
|
"loss": 0.2805, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.2218, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"eval_accuracy": 0.9285341679707877, |
|
"eval_f1": 0.9269190721647167, |
|
"eval_loss": 0.34819331765174866, |
|
"eval_roc_auc": 0.9788880411666135, |
|
"eval_runtime": 31.0647, |
|
"eval_samples_per_second": 61.71, |
|
"eval_steps_per_second": 1.931, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"step": 6000, |
|
"total_flos": 1.357738787414016e+19, |
|
"train_loss": 0.3802111453215281, |
|
"train_runtime": 17260.548, |
|
"train_samples_per_second": 44.402, |
|
"train_steps_per_second": 0.348 |
|
} |
|
], |
|
"max_steps": 6000, |
|
"num_train_epochs": 100, |
|
"total_flos": 1.357738787414016e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |