{ |
|
"best_metric": 0.9162458181381226, |
|
"best_model_checkpoint": "./checkpoint-4000", |
|
"epoch": 50.63291139240506, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.7499999999999996e-07, |
|
"loss": 23.4903, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 7.499999999999999e-07, |
|
"loss": 21.1248, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.1249999999999998e-06, |
|
"loss": 22.9317, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.4625e-06, |
|
"loss": 20.6205, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8375e-06, |
|
"loss": 22.2659, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.2124999999999996e-06, |
|
"loss": 21.4276, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.5875e-06, |
|
"loss": 21.8665, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.9624999999999996e-06, |
|
"loss": 20.8487, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.3374999999999994e-06, |
|
"loss": 21.838, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.7125e-06, |
|
"loss": 19.0875, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.087499999999999e-06, |
|
"loss": 19.3293, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.462499999999999e-06, |
|
"loss": 16.2192, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.8375e-06, |
|
"loss": 15.0126, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 5.2125e-06, |
|
"loss": 13.5756, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 5.5874999999999994e-06, |
|
"loss": 12.5467, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.962499999999999e-06, |
|
"loss": 11.8743, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 6.3375e-06, |
|
"loss": 11.4262, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.712499999999999e-06, |
|
"loss": 10.3319, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 7.0874999999999995e-06, |
|
"loss": 10.0354, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.4625e-06, |
|
"loss": 9.8568, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.837499999999999e-06, |
|
"loss": 9.4804, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 8.2125e-06, |
|
"loss": 9.0891, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 8.5875e-06, |
|
"loss": 8.6768, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 8.9625e-06, |
|
"loss": 8.5948, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 9.3375e-06, |
|
"loss": 8.4701, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 9.712499999999999e-06, |
|
"loss": 8.2693, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.00875e-05, |
|
"loss": 8.0611, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.04625e-05, |
|
"loss": 8.0222, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.0837499999999997e-05, |
|
"loss": 7.5698, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.1212499999999998e-05, |
|
"loss": 7.5901, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.1587499999999999e-05, |
|
"loss": 7.2712, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.19625e-05, |
|
"loss": 7.4485, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.23375e-05, |
|
"loss": 7.1484, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.2712499999999999e-05, |
|
"loss": 7.1105, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 1.3087499999999998e-05, |
|
"loss": 6.8399, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 1.3462499999999999e-05, |
|
"loss": 6.8718, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.38375e-05, |
|
"loss": 6.5045, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.4212499999999998e-05, |
|
"loss": 6.574, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.4587499999999999e-05, |
|
"loss": 6.1716, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.49625e-05, |
|
"loss": 6.3618, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 1.5337499999999997e-05, |
|
"loss": 6.012, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.57125e-05, |
|
"loss": 6.0979, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 1.6087499999999998e-05, |
|
"loss": 5.7887, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 1.6462499999999997e-05, |
|
"loss": 5.892, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.68375e-05, |
|
"loss": 5.4672, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 1.7212499999999998e-05, |
|
"loss": 5.6353, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 1.7587499999999997e-05, |
|
"loss": 5.2754, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.7962499999999996e-05, |
|
"loss": 5.3343, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.83375e-05, |
|
"loss": 5.0586, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.8712499999999997e-05, |
|
"loss": 5.0767, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"eval_cer": 1.0, |
|
"eval_loss": 4.878269672393799, |
|
"eval_runtime": 44.5452, |
|
"eval_samples_per_second": 10.798, |
|
"eval_steps_per_second": 1.369, |
|
"eval_wer": 1.0, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.90875e-05, |
|
"loss": 4.7637, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.94625e-05, |
|
"loss": 4.8495, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.9837499999999998e-05, |
|
"loss": 4.5651, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 2.02125e-05, |
|
"loss": 4.7084, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 2.05875e-05, |
|
"loss": 4.3947, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 2.09625e-05, |
|
"loss": 4.4641, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 2.1337499999999997e-05, |
|
"loss": 4.2175, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.1712499999999996e-05, |
|
"loss": 4.2767, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 2.2087499999999998e-05, |
|
"loss": 4.0216, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 2.2462499999999997e-05, |
|
"loss": 4.0459, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.2837499999999996e-05, |
|
"loss": 3.8726, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 2.32125e-05, |
|
"loss": 3.8982, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.3587499999999997e-05, |
|
"loss": 3.7419, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 2.39625e-05, |
|
"loss": 3.7623, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 2.43375e-05, |
|
"loss": 3.6173, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 2.4712499999999998e-05, |
|
"loss": 3.625, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 2.50875e-05, |
|
"loss": 3.4981, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 2.54625e-05, |
|
"loss": 3.5114, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 2.5837499999999994e-05, |
|
"loss": 3.42, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 2.6212499999999997e-05, |
|
"loss": 3.4173, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 2.6587499999999996e-05, |
|
"loss": 3.3501, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 2.6962499999999998e-05, |
|
"loss": 3.3289, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 2.7337499999999997e-05, |
|
"loss": 3.2786, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 2.7712499999999996e-05, |
|
"loss": 3.2836, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 2.80875e-05, |
|
"loss": 3.2514, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 2.8462499999999997e-05, |
|
"loss": 3.2778, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 2.88375e-05, |
|
"loss": 3.2042, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 2.92125e-05, |
|
"loss": 3.1964, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 2.9587499999999998e-05, |
|
"loss": 3.2052, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 2.99625e-05, |
|
"loss": 3.1989, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 3.03375e-05, |
|
"loss": 3.1823, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 3.0712499999999994e-05, |
|
"loss": 3.1613, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 3.10875e-05, |
|
"loss": 3.1659, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 3.14625e-05, |
|
"loss": 3.1798, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 3.1837499999999995e-05, |
|
"loss": 3.1711, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 3.22125e-05, |
|
"loss": 3.1659, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 3.25875e-05, |
|
"loss": 3.1309, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 11.14, |
|
"learning_rate": 3.2962499999999995e-05, |
|
"loss": 3.1706, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 3.33375e-05, |
|
"loss": 3.1441, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"learning_rate": 3.37125e-05, |
|
"loss": 3.1341, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"learning_rate": 3.4087499999999995e-05, |
|
"loss": 3.1594, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"learning_rate": 3.44625e-05, |
|
"loss": 3.1262, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 3.48375e-05, |
|
"loss": 3.1541, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 11.9, |
|
"learning_rate": 3.5212499999999995e-05, |
|
"loss": 3.1393, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 3.55875e-05, |
|
"loss": 3.1398, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"learning_rate": 3.596249999999999e-05, |
|
"loss": 3.1551, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"learning_rate": 3.6337499999999996e-05, |
|
"loss": 3.1391, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 3.67125e-05, |
|
"loss": 3.1236, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 12.53, |
|
"learning_rate": 3.7087499999999993e-05, |
|
"loss": 3.1327, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 3.7462499999999996e-05, |
|
"loss": 3.1156, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"eval_cer": 1.0, |
|
"eval_loss": 3.0990264415740967, |
|
"eval_runtime": 44.9243, |
|
"eval_samples_per_second": 10.707, |
|
"eval_steps_per_second": 1.358, |
|
"eval_wer": 1.0, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"learning_rate": 3.783749999999999e-05, |
|
"loss": 3.1365, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 3.8212499999999994e-05, |
|
"loss": 3.1123, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 3.8587499999999996e-05, |
|
"loss": 3.132, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 13.16, |
|
"learning_rate": 3.896249999999999e-05, |
|
"loss": 3.1218, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 3.9337499999999994e-05, |
|
"loss": 3.1266, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 3.9712499999999996e-05, |
|
"loss": 3.1247, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 4.008749999999999e-05, |
|
"loss": 3.1203, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 4.0462499999999994e-05, |
|
"loss": 3.1071, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"learning_rate": 4.0837499999999997e-05, |
|
"loss": 3.1095, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 13.92, |
|
"learning_rate": 4.12125e-05, |
|
"loss": 3.0724, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"learning_rate": 4.1587499999999994e-05, |
|
"loss": 3.1, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 14.18, |
|
"learning_rate": 4.19625e-05, |
|
"loss": 3.0862, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"learning_rate": 4.23375e-05, |
|
"loss": 3.1141, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 14.43, |
|
"learning_rate": 4.2712499999999995e-05, |
|
"loss": 3.0847, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"learning_rate": 4.30875e-05, |
|
"loss": 3.0845, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 4.34625e-05, |
|
"loss": 3.0537, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"learning_rate": 4.3837499999999995e-05, |
|
"loss": 3.0811, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 14.94, |
|
"learning_rate": 4.42125e-05, |
|
"loss": 3.031, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 15.06, |
|
"learning_rate": 4.45875e-05, |
|
"loss": 3.0431, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"learning_rate": 4.4962499999999995e-05, |
|
"loss": 2.9891, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"learning_rate": 4.53375e-05, |
|
"loss": 2.9511, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"learning_rate": 4.57125e-05, |
|
"loss": 2.8874, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 4.60875e-05, |
|
"loss": 2.8216, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"learning_rate": 4.64625e-05, |
|
"loss": 2.7211, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 4.68375e-05, |
|
"loss": 2.6755, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"learning_rate": 4.721249999999999e-05, |
|
"loss": 2.5301, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"learning_rate": 4.758749999999999e-05, |
|
"loss": 2.4484, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 4.7962499999999994e-05, |
|
"loss": 2.2522, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"learning_rate": 4.8337499999999996e-05, |
|
"loss": 2.1895, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"learning_rate": 4.871249999999999e-05, |
|
"loss": 2.0274, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 4.9087499999999994e-05, |
|
"loss": 1.9528, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 16.71, |
|
"learning_rate": 4.9462499999999996e-05, |
|
"loss": 1.8849, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 4.983749999999999e-05, |
|
"loss": 1.8432, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"learning_rate": 5.0212499999999994e-05, |
|
"loss": 1.7181, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"learning_rate": 5.0587499999999996e-05, |
|
"loss": 1.7099, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"learning_rate": 5.096249999999999e-05, |
|
"loss": 1.6276, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 5.1337499999999994e-05, |
|
"loss": 1.6456, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 5.1712499999999997e-05, |
|
"loss": 1.5613, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 5.20875e-05, |
|
"loss": 1.5553, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"learning_rate": 5.2462499999999994e-05, |
|
"loss": 1.4874, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"learning_rate": 5.28375e-05, |
|
"loss": 1.5079, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 5.32125e-05, |
|
"loss": 1.4935, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 18.1, |
|
"learning_rate": 5.3587499999999995e-05, |
|
"loss": 1.4843, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 5.39625e-05, |
|
"loss": 1.416, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"learning_rate": 5.43375e-05, |
|
"loss": 1.3815, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"learning_rate": 5.4712499999999995e-05, |
|
"loss": 1.4005, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 5.50875e-05, |
|
"loss": 1.3824, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"learning_rate": 5.54625e-05, |
|
"loss": 1.3038, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"learning_rate": 5.58375e-05, |
|
"loss": 1.4168, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"learning_rate": 5.62125e-05, |
|
"loss": 1.3506, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"eval_cer": 0.28892684849736266, |
|
"eval_loss": 1.1056294441223145, |
|
"eval_runtime": 44.5929, |
|
"eval_samples_per_second": 10.786, |
|
"eval_steps_per_second": 1.368, |
|
"eval_wer": 0.7031036834924966, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 5.658749999999999e-05, |
|
"loss": 1.3407, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 5.696249999999999e-05, |
|
"loss": 1.2849, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 5.733749999999999e-05, |
|
"loss": 1.3141, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"learning_rate": 5.771249999999999e-05, |
|
"loss": 1.2858, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 5.8087499999999996e-05, |
|
"loss": 1.2842, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 19.75, |
|
"learning_rate": 5.846249999999999e-05, |
|
"loss": 1.2537, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 19.87, |
|
"learning_rate": 5.8837499999999994e-05, |
|
"loss": 1.304, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 5.9212499999999996e-05, |
|
"loss": 1.243, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 20.13, |
|
"learning_rate": 5.958749999999999e-05, |
|
"loss": 1.2636, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 20.25, |
|
"learning_rate": 5.9962499999999994e-05, |
|
"loss": 1.2239, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 20.38, |
|
"learning_rate": 6.0337499999999996e-05, |
|
"loss": 1.267, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 20.51, |
|
"learning_rate": 6.071249999999999e-05, |
|
"loss": 1.216, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"learning_rate": 6.10875e-05, |
|
"loss": 1.253, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 20.76, |
|
"learning_rate": 6.14625e-05, |
|
"loss": 1.2137, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 20.89, |
|
"learning_rate": 6.183749999999999e-05, |
|
"loss": 1.1845, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 21.01, |
|
"learning_rate": 6.22125e-05, |
|
"loss": 1.1501, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 21.14, |
|
"learning_rate": 6.25875e-05, |
|
"loss": 1.1904, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 21.27, |
|
"learning_rate": 6.296249999999999e-05, |
|
"loss": 1.1456, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 21.39, |
|
"learning_rate": 6.33375e-05, |
|
"loss": 1.1798, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 21.52, |
|
"learning_rate": 6.37125e-05, |
|
"loss": 1.1122, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 21.65, |
|
"learning_rate": 6.408749999999999e-05, |
|
"loss": 1.1512, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"learning_rate": 6.44625e-05, |
|
"loss": 1.1413, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 21.9, |
|
"learning_rate": 6.48375e-05, |
|
"loss": 1.1652, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 22.03, |
|
"learning_rate": 6.521249999999999e-05, |
|
"loss": 1.177, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 22.15, |
|
"learning_rate": 6.55875e-05, |
|
"loss": 1.1326, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 22.28, |
|
"learning_rate": 6.596249999999998e-05, |
|
"loss": 1.1082, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 22.41, |
|
"learning_rate": 6.633749999999999e-05, |
|
"loss": 1.1321, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 22.53, |
|
"learning_rate": 6.671249999999999e-05, |
|
"loss": 1.0721, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 22.66, |
|
"learning_rate": 6.70875e-05, |
|
"loss": 1.1199, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 22.78, |
|
"learning_rate": 6.746249999999999e-05, |
|
"loss": 1.1436, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 22.91, |
|
"learning_rate": 6.783749999999999e-05, |
|
"loss": 1.0941, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 23.04, |
|
"learning_rate": 6.82125e-05, |
|
"loss": 1.0631, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 23.16, |
|
"learning_rate": 6.85875e-05, |
|
"loss": 1.064, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 23.29, |
|
"learning_rate": 6.896249999999999e-05, |
|
"loss": 1.0739, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 23.42, |
|
"learning_rate": 6.93375e-05, |
|
"loss": 1.0354, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 23.54, |
|
"learning_rate": 6.97125e-05, |
|
"loss": 1.0343, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 23.67, |
|
"learning_rate": 7.008749999999999e-05, |
|
"loss": 1.0724, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 23.8, |
|
"learning_rate": 7.04625e-05, |
|
"loss": 1.0982, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 23.92, |
|
"learning_rate": 7.08375e-05, |
|
"loss": 1.065, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 24.05, |
|
"learning_rate": 7.121249999999999e-05, |
|
"loss": 1.0754, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 24.18, |
|
"learning_rate": 7.15875e-05, |
|
"loss": 1.0708, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 24.3, |
|
"learning_rate": 7.19625e-05, |
|
"loss": 1.0165, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 24.43, |
|
"learning_rate": 7.233749999999999e-05, |
|
"loss": 1.02, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 24.56, |
|
"learning_rate": 7.27125e-05, |
|
"loss": 1.0985, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 24.68, |
|
"learning_rate": 7.30875e-05, |
|
"loss": 0.9746, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 24.81, |
|
"learning_rate": 7.346249999999999e-05, |
|
"loss": 1.0644, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 24.94, |
|
"learning_rate": 7.38375e-05, |
|
"loss": 1.0104, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 25.06, |
|
"learning_rate": 7.42125e-05, |
|
"loss": 1.028, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 25.19, |
|
"learning_rate": 7.45875e-05, |
|
"loss": 1.0107, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"learning_rate": 7.49625e-05, |
|
"loss": 0.9997, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"eval_cer": 0.2301165693690988, |
|
"eval_loss": 0.919084906578064, |
|
"eval_runtime": 44.703, |
|
"eval_samples_per_second": 10.76, |
|
"eval_steps_per_second": 1.365, |
|
"eval_wer": 0.5943894952251023, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 25.44, |
|
"learning_rate": 7.465384615384615e-05, |
|
"loss": 0.9571, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 25.57, |
|
"learning_rate": 7.426923076923075e-05, |
|
"loss": 0.9801, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 25.7, |
|
"learning_rate": 7.388461538461538e-05, |
|
"loss": 0.9779, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 25.82, |
|
"learning_rate": 7.35e-05, |
|
"loss": 1.0168, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 25.95, |
|
"learning_rate": 7.31153846153846e-05, |
|
"loss": 0.9302, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 26.08, |
|
"learning_rate": 7.273076923076923e-05, |
|
"loss": 0.989, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 26.2, |
|
"learning_rate": 7.234615384615385e-05, |
|
"loss": 0.9357, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 26.33, |
|
"learning_rate": 7.196153846153846e-05, |
|
"loss": 0.9858, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 26.46, |
|
"learning_rate": 7.157692307692307e-05, |
|
"loss": 0.9813, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 26.58, |
|
"learning_rate": 7.119230769230769e-05, |
|
"loss": 0.9554, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 26.71, |
|
"learning_rate": 7.08076923076923e-05, |
|
"loss": 0.8935, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 26.84, |
|
"learning_rate": 7.042307692307692e-05, |
|
"loss": 0.9955, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 26.96, |
|
"learning_rate": 7.003846153846154e-05, |
|
"loss": 0.9205, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 27.09, |
|
"learning_rate": 6.965384615384615e-05, |
|
"loss": 0.9527, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 27.22, |
|
"learning_rate": 6.926923076923075e-05, |
|
"loss": 0.8899, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 27.34, |
|
"learning_rate": 6.888461538461538e-05, |
|
"loss": 0.9594, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 27.47, |
|
"learning_rate": 6.85e-05, |
|
"loss": 0.9061, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 27.59, |
|
"learning_rate": 6.81153846153846e-05, |
|
"loss": 0.94, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 27.72, |
|
"learning_rate": 6.773076923076923e-05, |
|
"loss": 0.8611, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 27.85, |
|
"learning_rate": 6.734615384615385e-05, |
|
"loss": 0.9391, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 27.97, |
|
"learning_rate": 6.696153846153846e-05, |
|
"loss": 0.8905, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 28.1, |
|
"learning_rate": 6.657692307692307e-05, |
|
"loss": 0.888, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 28.23, |
|
"learning_rate": 6.619230769230769e-05, |
|
"loss": 0.8749, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 28.35, |
|
"learning_rate": 6.580769230769231e-05, |
|
"loss": 0.9331, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 28.48, |
|
"learning_rate": 6.542307692307692e-05, |
|
"loss": 0.8135, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 28.61, |
|
"learning_rate": 6.503846153846154e-05, |
|
"loss": 0.9121, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 28.73, |
|
"learning_rate": 6.465384615384615e-05, |
|
"loss": 0.859, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 28.86, |
|
"learning_rate": 6.426923076923076e-05, |
|
"loss": 0.8726, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 28.99, |
|
"learning_rate": 6.388461538461538e-05, |
|
"loss": 0.8497, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 29.11, |
|
"learning_rate": 6.35e-05, |
|
"loss": 0.8673, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 29.24, |
|
"learning_rate": 6.31153846153846e-05, |
|
"loss": 0.8349, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 29.37, |
|
"learning_rate": 6.273076923076923e-05, |
|
"loss": 0.8946, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 29.49, |
|
"learning_rate": 6.234615384615384e-05, |
|
"loss": 0.8805, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 29.62, |
|
"learning_rate": 6.196153846153846e-05, |
|
"loss": 0.8752, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 29.75, |
|
"learning_rate": 6.157692307692307e-05, |
|
"loss": 0.8197, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 29.87, |
|
"learning_rate": 6.119230769230769e-05, |
|
"loss": 0.8332, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"learning_rate": 6.08076923076923e-05, |
|
"loss": 0.7933, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 30.13, |
|
"learning_rate": 6.0423076923076924e-05, |
|
"loss": 0.8712, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 30.25, |
|
"learning_rate": 6.003846153846153e-05, |
|
"loss": 0.824, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 30.38, |
|
"learning_rate": 5.965384615384615e-05, |
|
"loss": 0.8158, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 30.51, |
|
"learning_rate": 5.926923076923076e-05, |
|
"loss": 0.8218, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 30.63, |
|
"learning_rate": 5.888461538461538e-05, |
|
"loss": 0.8403, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 30.76, |
|
"learning_rate": 5.85e-05, |
|
"loss": 0.7986, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 30.89, |
|
"learning_rate": 5.8115384615384614e-05, |
|
"loss": 0.8391, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 31.01, |
|
"learning_rate": 5.773076923076922e-05, |
|
"loss": 0.7736, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 31.14, |
|
"learning_rate": 5.734615384615384e-05, |
|
"loss": 0.8478, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 31.27, |
|
"learning_rate": 5.696153846153846e-05, |
|
"loss": 0.7728, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 31.39, |
|
"learning_rate": 5.6576923076923073e-05, |
|
"loss": 0.8231, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 31.52, |
|
"learning_rate": 5.619230769230769e-05, |
|
"loss": 0.7602, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 31.65, |
|
"learning_rate": 5.58076923076923e-05, |
|
"loss": 0.7838, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 31.65, |
|
"eval_cer": 0.2152122088112177, |
|
"eval_loss": 0.8952043056488037, |
|
"eval_runtime": 45.1763, |
|
"eval_samples_per_second": 10.647, |
|
"eval_steps_per_second": 1.35, |
|
"eval_wer": 0.555593451568895, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 31.77, |
|
"learning_rate": 5.542307692307691e-05, |
|
"loss": 0.8065, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 31.9, |
|
"learning_rate": 5.503846153846153e-05, |
|
"loss": 0.773, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 32.03, |
|
"learning_rate": 5.465384615384615e-05, |
|
"loss": 0.7854, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 32.15, |
|
"learning_rate": 5.426923076923076e-05, |
|
"loss": 0.7724, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 32.28, |
|
"learning_rate": 5.3884615384615384e-05, |
|
"loss": 0.7639, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 32.41, |
|
"learning_rate": 5.35e-05, |
|
"loss": 0.7993, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 32.53, |
|
"learning_rate": 5.311538461538461e-05, |
|
"loss": 0.7957, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 32.66, |
|
"learning_rate": 5.273076923076922e-05, |
|
"loss": 0.7686, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 32.78, |
|
"learning_rate": 5.234615384615384e-05, |
|
"loss": 0.8096, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 32.91, |
|
"learning_rate": 5.196153846153846e-05, |
|
"loss": 0.7357, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 33.04, |
|
"learning_rate": 5.1576923076923074e-05, |
|
"loss": 0.7674, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 33.16, |
|
"learning_rate": 5.119230769230769e-05, |
|
"loss": 0.7989, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 33.29, |
|
"learning_rate": 5.08076923076923e-05, |
|
"loss": 0.7474, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 33.42, |
|
"learning_rate": 5.042307692307692e-05, |
|
"loss": 0.7153, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 33.54, |
|
"learning_rate": 5.0038461538461533e-05, |
|
"loss": 0.7109, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 33.67, |
|
"learning_rate": 4.965384615384615e-05, |
|
"loss": 0.7841, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 33.8, |
|
"learning_rate": 4.926923076923076e-05, |
|
"loss": 0.762, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 33.92, |
|
"learning_rate": 4.8884615384615385e-05, |
|
"loss": 0.7414, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 34.05, |
|
"learning_rate": 4.849999999999999e-05, |
|
"loss": 0.7544, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 34.18, |
|
"learning_rate": 4.811538461538461e-05, |
|
"loss": 0.7338, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 34.3, |
|
"learning_rate": 4.773076923076922e-05, |
|
"loss": 0.7266, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 34.43, |
|
"learning_rate": 4.7346153846153845e-05, |
|
"loss": 0.7131, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 34.56, |
|
"learning_rate": 4.696153846153846e-05, |
|
"loss": 0.7291, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 34.68, |
|
"learning_rate": 4.6576923076923074e-05, |
|
"loss": 0.7051, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 34.81, |
|
"learning_rate": 4.619230769230769e-05, |
|
"loss": 0.7643, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 34.94, |
|
"learning_rate": 4.5807692307692304e-05, |
|
"loss": 0.727, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 35.06, |
|
"learning_rate": 4.542307692307692e-05, |
|
"loss": 0.7142, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 35.19, |
|
"learning_rate": 4.5038461538461534e-05, |
|
"loss": 0.7055, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 35.32, |
|
"learning_rate": 4.465384615384615e-05, |
|
"loss": 0.7339, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 35.44, |
|
"learning_rate": 4.426923076923077e-05, |
|
"loss": 0.6956, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 35.57, |
|
"learning_rate": 4.3884615384615385e-05, |
|
"loss": 0.7508, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 35.7, |
|
"learning_rate": 4.3499999999999993e-05, |
|
"loss": 0.7072, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 35.82, |
|
"learning_rate": 4.311538461538461e-05, |
|
"loss": 0.7103, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 35.95, |
|
"learning_rate": 4.273076923076923e-05, |
|
"loss": 0.6783, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 36.08, |
|
"learning_rate": 4.2346153846153845e-05, |
|
"loss": 0.7419, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 36.2, |
|
"learning_rate": 4.196153846153846e-05, |
|
"loss": 0.7091, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 36.33, |
|
"learning_rate": 4.1576923076923075e-05, |
|
"loss": 0.7073, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 36.46, |
|
"learning_rate": 4.119230769230768e-05, |
|
"loss": 0.6937, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 36.58, |
|
"learning_rate": 4.0807692307692305e-05, |
|
"loss": 0.756, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 36.71, |
|
"learning_rate": 4.042307692307692e-05, |
|
"loss": 0.6744, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 36.84, |
|
"learning_rate": 4.0038461538461534e-05, |
|
"loss": 0.7165, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 36.96, |
|
"learning_rate": 3.9653846153846156e-05, |
|
"loss": 0.6831, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 37.09, |
|
"learning_rate": 3.926923076923077e-05, |
|
"loss": 0.6894, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 37.22, |
|
"learning_rate": 3.888461538461538e-05, |
|
"loss": 0.6419, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 37.34, |
|
"learning_rate": 3.8499999999999994e-05, |
|
"loss": 0.7187, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 37.47, |
|
"learning_rate": 3.811538461538461e-05, |
|
"loss": 0.677, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 37.59, |
|
"learning_rate": 3.773076923076923e-05, |
|
"loss": 0.7263, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 37.72, |
|
"learning_rate": 3.734615384615384e-05, |
|
"loss": 0.6257, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 37.85, |
|
"learning_rate": 3.696153846153846e-05, |
|
"loss": 0.7051, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 37.97, |
|
"learning_rate": 3.6576923076923075e-05, |
|
"loss": 0.6665, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 37.97, |
|
"eval_cer": 0.2016781484053836, |
|
"eval_loss": 0.8907838463783264, |
|
"eval_runtime": 45.6242, |
|
"eval_samples_per_second": 10.543, |
|
"eval_steps_per_second": 1.337, |
|
"eval_wer": 0.5251534788540245, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 38.1, |
|
"learning_rate": 3.619230769230769e-05, |
|
"loss": 0.7016, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 38.23, |
|
"learning_rate": 3.5807692307692305e-05, |
|
"loss": 0.6585, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 38.35, |
|
"learning_rate": 3.542307692307692e-05, |
|
"loss": 0.6673, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 38.48, |
|
"learning_rate": 3.5038461538461535e-05, |
|
"loss": 0.6411, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 38.61, |
|
"learning_rate": 3.465384615384615e-05, |
|
"loss": 0.7038, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 38.73, |
|
"learning_rate": 3.4269230769230765e-05, |
|
"loss": 0.6458, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 38.86, |
|
"learning_rate": 3.3884615384615386e-05, |
|
"loss": 0.7231, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 38.99, |
|
"learning_rate": 3.3499999999999994e-05, |
|
"loss": 0.6495, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 39.11, |
|
"learning_rate": 3.3115384615384616e-05, |
|
"loss": 0.6788, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 39.24, |
|
"learning_rate": 3.273076923076923e-05, |
|
"loss": 0.6452, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 39.37, |
|
"learning_rate": 3.2346153846153846e-05, |
|
"loss": 0.7015, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 39.49, |
|
"learning_rate": 3.196153846153846e-05, |
|
"loss": 0.6518, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 39.62, |
|
"learning_rate": 3.1576923076923076e-05, |
|
"loss": 0.6757, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 39.75, |
|
"learning_rate": 3.119230769230769e-05, |
|
"loss": 0.6495, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 39.87, |
|
"learning_rate": 3.0807692307692305e-05, |
|
"loss": 0.6434, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"learning_rate": 3.0423076923076924e-05, |
|
"loss": 0.6132, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 40.13, |
|
"learning_rate": 3.0038461538461535e-05, |
|
"loss": 0.6959, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 40.25, |
|
"learning_rate": 2.965384615384615e-05, |
|
"loss": 0.6468, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 40.38, |
|
"learning_rate": 2.926923076923077e-05, |
|
"loss": 0.6681, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 40.51, |
|
"learning_rate": 2.888461538461538e-05, |
|
"loss": 0.6446, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 40.63, |
|
"learning_rate": 2.8499999999999998e-05, |
|
"loss": 0.6554, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 40.76, |
|
"learning_rate": 2.8115384615384613e-05, |
|
"loss": 0.6204, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 40.89, |
|
"learning_rate": 2.7730769230769228e-05, |
|
"loss": 0.677, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 41.01, |
|
"learning_rate": 2.7346153846153843e-05, |
|
"loss": 0.5961, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 41.14, |
|
"learning_rate": 2.696153846153846e-05, |
|
"loss": 0.665, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 41.27, |
|
"learning_rate": 2.6576923076923073e-05, |
|
"loss": 0.6753, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 41.39, |
|
"learning_rate": 2.619230769230769e-05, |
|
"loss": 0.6387, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 41.52, |
|
"learning_rate": 2.5807692307692306e-05, |
|
"loss": 0.6281, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 41.65, |
|
"learning_rate": 2.542307692307692e-05, |
|
"loss": 0.6287, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 41.77, |
|
"learning_rate": 2.5038461538461536e-05, |
|
"loss": 0.6413, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 41.9, |
|
"learning_rate": 2.4653846153846154e-05, |
|
"loss": 0.6061, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 42.03, |
|
"learning_rate": 2.4269230769230765e-05, |
|
"loss": 0.648, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 42.15, |
|
"learning_rate": 2.3884615384615384e-05, |
|
"loss": 0.5926, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 42.28, |
|
"learning_rate": 2.35e-05, |
|
"loss": 0.6366, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 42.41, |
|
"learning_rate": 2.3115384615384614e-05, |
|
"loss": 0.6625, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 42.53, |
|
"learning_rate": 2.273076923076923e-05, |
|
"loss": 0.634, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 42.66, |
|
"learning_rate": 2.2346153846153847e-05, |
|
"loss": 0.618, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 42.78, |
|
"learning_rate": 2.1961538461538458e-05, |
|
"loss": 0.5911, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 42.91, |
|
"learning_rate": 2.1576923076923076e-05, |
|
"loss": 0.5936, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 43.04, |
|
"learning_rate": 2.119230769230769e-05, |
|
"loss": 0.6267, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 43.16, |
|
"learning_rate": 2.0807692307692303e-05, |
|
"loss": 0.6123, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 43.29, |
|
"learning_rate": 2.042307692307692e-05, |
|
"loss": 0.6398, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 43.42, |
|
"learning_rate": 2.003846153846154e-05, |
|
"loss": 0.606, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 43.54, |
|
"learning_rate": 1.965384615384615e-05, |
|
"loss": 0.6253, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 43.67, |
|
"learning_rate": 1.9269230769230766e-05, |
|
"loss": 0.5847, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 43.8, |
|
"learning_rate": 1.8884615384615384e-05, |
|
"loss": 0.6248, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 43.92, |
|
"learning_rate": 1.85e-05, |
|
"loss": 0.5884, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 44.05, |
|
"learning_rate": 1.8115384615384614e-05, |
|
"loss": 0.6038, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 44.18, |
|
"learning_rate": 1.773076923076923e-05, |
|
"loss": 0.5888, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 44.3, |
|
"learning_rate": 1.7346153846153844e-05, |
|
"loss": 0.6265, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 44.3, |
|
"eval_cer": 0.19540855592889456, |
|
"eval_loss": 0.9062958359718323, |
|
"eval_runtime": 44.6904, |
|
"eval_samples_per_second": 10.763, |
|
"eval_steps_per_second": 1.365, |
|
"eval_wer": 0.5133015006821282, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 44.43, |
|
"learning_rate": 1.8412499999999997e-05, |
|
"loss": 0.6002, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 44.56, |
|
"learning_rate": 1.8037499999999998e-05, |
|
"loss": 0.6191, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 44.68, |
|
"learning_rate": 1.76625e-05, |
|
"loss": 0.5811, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 44.81, |
|
"learning_rate": 1.72875e-05, |
|
"loss": 0.6299, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 44.94, |
|
"learning_rate": 1.6912499999999998e-05, |
|
"loss": 0.5605, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 45.06, |
|
"learning_rate": 1.65375e-05, |
|
"loss": 0.6183, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 45.19, |
|
"learning_rate": 1.61625e-05, |
|
"loss": 0.5852, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 45.32, |
|
"learning_rate": 1.5787499999999997e-05, |
|
"loss": 0.594, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 45.44, |
|
"learning_rate": 1.54125e-05, |
|
"loss": 0.5965, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 45.57, |
|
"learning_rate": 1.50375e-05, |
|
"loss": 0.6005, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 45.7, |
|
"learning_rate": 1.4662499999999999e-05, |
|
"loss": 0.5884, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 45.82, |
|
"learning_rate": 1.4287499999999998e-05, |
|
"loss": 0.5884, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 45.95, |
|
"learning_rate": 1.39125e-05, |
|
"loss": 0.5628, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 46.08, |
|
"learning_rate": 1.3537499999999999e-05, |
|
"loss": 0.6339, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 46.2, |
|
"learning_rate": 1.3162499999999998e-05, |
|
"loss": 0.5578, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 46.33, |
|
"learning_rate": 1.2787499999999999e-05, |
|
"loss": 0.6239, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 46.46, |
|
"learning_rate": 1.24125e-05, |
|
"loss": 0.5872, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 46.58, |
|
"learning_rate": 1.20375e-05, |
|
"loss": 0.5697, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 46.71, |
|
"learning_rate": 1.1662499999999999e-05, |
|
"loss": 0.5475, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 46.84, |
|
"learning_rate": 1.1287499999999998e-05, |
|
"loss": 0.5979, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 46.96, |
|
"learning_rate": 1.0912499999999998e-05, |
|
"loss": 0.5742, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 47.09, |
|
"learning_rate": 1.05375e-05, |
|
"loss": 0.6054, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 47.22, |
|
"learning_rate": 1.01625e-05, |
|
"loss": 0.5777, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 47.34, |
|
"learning_rate": 9.787499999999999e-06, |
|
"loss": 0.5734, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 47.47, |
|
"learning_rate": 9.412499999999998e-06, |
|
"loss": 0.5322, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 47.59, |
|
"learning_rate": 9.0375e-06, |
|
"loss": 0.6287, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 47.72, |
|
"learning_rate": 8.6625e-06, |
|
"loss": 0.547, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 47.85, |
|
"learning_rate": 8.2875e-06, |
|
"loss": 0.6414, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 47.97, |
|
"learning_rate": 7.9125e-06, |
|
"loss": 0.5661, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 48.1, |
|
"learning_rate": 7.537499999999999e-06, |
|
"loss": 0.5893, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 48.23, |
|
"learning_rate": 7.1625e-06, |
|
"loss": 0.556, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 48.35, |
|
"learning_rate": 6.787499999999999e-06, |
|
"loss": 0.6265, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 48.48, |
|
"learning_rate": 6.4125e-06, |
|
"loss": 0.5644, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 48.61, |
|
"learning_rate": 6.037499999999999e-06, |
|
"loss": 0.6202, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 48.73, |
|
"learning_rate": 5.6624999999999996e-06, |
|
"loss": 0.5581, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 48.86, |
|
"learning_rate": 5.287499999999999e-06, |
|
"loss": 0.572, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 48.99, |
|
"learning_rate": 4.9125e-06, |
|
"loss": 0.5559, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 49.11, |
|
"learning_rate": 4.537499999999999e-06, |
|
"loss": 0.6013, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 49.24, |
|
"learning_rate": 4.1624999999999995e-06, |
|
"loss": 0.5498, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 49.37, |
|
"learning_rate": 3.7874999999999997e-06, |
|
"loss": 0.5883, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 49.49, |
|
"learning_rate": 3.4124999999999995e-06, |
|
"loss": 0.5777, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 49.62, |
|
"learning_rate": 3.0374999999999997e-06, |
|
"loss": 0.5768, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 49.75, |
|
"learning_rate": 2.6624999999999995e-06, |
|
"loss": 0.5603, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 49.87, |
|
"learning_rate": 2.2874999999999997e-06, |
|
"loss": 0.5814, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"learning_rate": 1.9125e-06, |
|
"loss": 0.5562, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 50.13, |
|
"learning_rate": 1.5374999999999999e-06, |
|
"loss": 0.5858, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 50.25, |
|
"learning_rate": 1.1624999999999999e-06, |
|
"loss": 0.5279, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 50.38, |
|
"learning_rate": 7.875e-07, |
|
"loss": 0.5734, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 50.51, |
|
"learning_rate": 4.124999999999999e-07, |
|
"loss": 0.5895, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 50.63, |
|
"learning_rate": 3.75e-08, |
|
"loss": 0.5935, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 50.63, |
|
"eval_cer": 0.1969102547256584, |
|
"eval_loss": 0.9162458181381226, |
|
"eval_runtime": 44.8405, |
|
"eval_samples_per_second": 10.727, |
|
"eval_steps_per_second": 1.36, |
|
"eval_wer": 0.5156036834924966, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 50.63, |
|
"step": 4000, |
|
"total_flos": 4.6049855774374625e+19, |
|
"train_loss": 0.07298430502414703, |
|
"train_runtime": 2125.6437, |
|
"train_samples_per_second": 60.217, |
|
"train_steps_per_second": 1.882 |
|
} |
|
], |
|
"max_steps": 4000, |
|
"num_train_epochs": 51, |
|
"total_flos": 4.6049855774374625e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |