{
  "best_metric": 10.634930784232232,
  "best_model_checkpoint": "./whisper-small-mix-pt/checkpoint-4000",
  "epoch": 2.062,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.005, "grad_norm": 8.937539100646973, "learning_rate": 4.6000000000000004e-07, "loss": 0.8175, "step": 25},
    {"epoch": 0.01, "grad_norm": 4.877315044403076, "learning_rate": 9.600000000000001e-07, "loss": 0.7318, "step": 50},
    {"epoch": 0.015, "grad_norm": 4.961461544036865, "learning_rate": 1.46e-06, "loss": 1.0495, "step": 75},
    {"epoch": 0.02, "grad_norm": 4.423418045043945, "learning_rate": 1.9600000000000003e-06, "loss": 0.7303, "step": 100},
    {"epoch": 0.025, "grad_norm": 3.701725482940674, "learning_rate": 2.46e-06, "loss": 0.5486, "step": 125},
    {"epoch": 0.03, "grad_norm": 3.901665449142456, "learning_rate": 2.96e-06, "loss": 0.4327, "step": 150},
    {"epoch": 0.035, "grad_norm": 2.7568914890289307, "learning_rate": 3.46e-06, "loss": 0.3445, "step": 175},
    {"epoch": 0.04, "grad_norm": 3.0615532398223877, "learning_rate": 3.96e-06, "loss": 0.317, "step": 200},
    {"epoch": 0.045, "grad_norm": 2.577845573425293, "learning_rate": 4.4600000000000005e-06, "loss": 0.2995, "step": 225},
    {"epoch": 0.05, "grad_norm": 2.810206890106201, "learning_rate": 4.960000000000001e-06, "loss": 0.2701, "step": 250},
    {"epoch": 0.055, "grad_norm": 2.9507219791412354, "learning_rate": 5.460000000000001e-06, "loss": 0.2275, "step": 275},
    {"epoch": 0.06, "grad_norm": 2.8784899711608887, "learning_rate": 5.9600000000000005e-06, "loss": 0.4164, "step": 300},
    {"epoch": 0.065, "grad_norm": 2.7221858501434326, "learning_rate": 6.460000000000001e-06, "loss": 0.2831, "step": 325},
    {"epoch": 0.07, "grad_norm": 2.760021209716797, "learning_rate": 6.96e-06, "loss": 0.2202, "step": 350},
    {"epoch": 0.075, "grad_norm": 2.8915979862213135, "learning_rate": 7.4600000000000006e-06, "loss": 0.2165, "step": 375},
    {"epoch": 0.08, "grad_norm": 2.7558434009552, "learning_rate": 7.960000000000002e-06, "loss": 0.207, "step": 400},
    {"epoch": 0.085, "grad_norm": 2.5287721157073975, "learning_rate": 8.46e-06, "loss": 0.2529, "step": 425},
    {"epoch": 0.09, "grad_norm": 3.026515483856201, "learning_rate": 8.96e-06, "loss": 0.2134, "step": 450},
    {"epoch": 0.095, "grad_norm": 2.3035366535186768, "learning_rate": 9.460000000000001e-06, "loss": 0.1968, "step": 475},
    {"epoch": 0.1, "grad_norm": 2.298630952835083, "learning_rate": 9.960000000000001e-06, "loss": 0.1881, "step": 500},
    {"epoch": 0.105, "grad_norm": 2.209261417388916, "learning_rate": 9.94888888888889e-06, "loss": 0.162, "step": 525},
    {"epoch": 0.11, "grad_norm": 2.819951295852661, "learning_rate": 9.893333333333334e-06, "loss": 0.1646, "step": 550},
    {"epoch": 0.115, "grad_norm": 2.437563896179199, "learning_rate": 9.837777777777778e-06, "loss": 0.1705, "step": 575},
    {"epoch": 0.12, "grad_norm": 2.5952935218811035, "learning_rate": 9.782222222222222e-06, "loss": 0.1938, "step": 600},
    {"epoch": 0.125, "grad_norm": 2.4692909717559814, "learning_rate": 9.726666666666668e-06, "loss": 0.1869, "step": 625},
    {"epoch": 0.13, "grad_norm": 2.4430558681488037, "learning_rate": 9.671111111111112e-06, "loss": 0.1788, "step": 650},
    {"epoch": 0.135, "grad_norm": 2.2733707427978516, "learning_rate": 9.615555555555558e-06, "loss": 0.1934, "step": 675},
    {"epoch": 0.14, "grad_norm": 2.256990909576416, "learning_rate": 9.56e-06, "loss": 0.1622, "step": 700},
    {"epoch": 0.145, "grad_norm": 2.216128349304199, "learning_rate": 9.504444444444446e-06, "loss": 0.157, "step": 725},
    {"epoch": 0.15, "grad_norm": 2.0830190181732178, "learning_rate": 9.44888888888889e-06, "loss": 0.169, "step": 750},
    {"epoch": 0.155, "grad_norm": 2.085129737854004, "learning_rate": 9.393333333333334e-06, "loss": 0.1548, "step": 775},
    {"epoch": 0.16, "grad_norm": 2.3641719818115234, "learning_rate": 9.33777777777778e-06, "loss": 0.1722, "step": 800},
    {"epoch": 0.165, "grad_norm": 3.0489625930786133, "learning_rate": 9.282222222222222e-06, "loss": 0.2306, "step": 825},
    {"epoch": 0.17, "grad_norm": 2.5903120040893555, "learning_rate": 9.226666666666668e-06, "loss": 0.2255, "step": 850},
    {"epoch": 0.175, "grad_norm": 2.470798969268799, "learning_rate": 9.171111111111112e-06, "loss": 0.1983, "step": 875},
    {"epoch": 0.18, "grad_norm": 1.7610605955123901, "learning_rate": 9.115555555555556e-06, "loss": 0.1991, "step": 900},
    {"epoch": 0.185, "grad_norm": 2.5426924228668213, "learning_rate": 9.060000000000001e-06, "loss": 0.1584, "step": 925},
    {"epoch": 0.19, "grad_norm": 2.5950539112091064, "learning_rate": 9.004444444444445e-06, "loss": 0.1707, "step": 950},
    {"epoch": 0.195, "grad_norm": 2.2430670261383057, "learning_rate": 8.94888888888889e-06, "loss": 0.1682, "step": 975},
    {"epoch": 0.2, "grad_norm": 2.1364951133728027, "learning_rate": 8.893333333333333e-06, "loss": 0.1575, "step": 1000},
    {"epoch": 0.2, "eval_loss": 0.21252164244651794, "eval_runtime": 385.7404, "eval_samples_per_second": 24.542, "eval_steps_per_second": 3.069, "eval_wer": 12.185491863550626, "step": 1000},
    {"epoch": 0.205, "grad_norm": 2.343320846557617, "learning_rate": 8.83777777777778e-06, "loss": 0.1679, "step": 1025},
    {"epoch": 0.21, "grad_norm": 2.078854560852051, "learning_rate": 8.782222222222223e-06, "loss": 0.1688, "step": 1050},
    {"epoch": 0.215, "grad_norm": 2.009870767593384, "learning_rate": 8.726666666666667e-06, "loss": 0.144, "step": 1075},
    {"epoch": 0.22, "grad_norm": 1.4615126848220825, "learning_rate": 8.671111111111113e-06, "loss": 0.127, "step": 1100},
    {"epoch": 0.225, "grad_norm": 2.175366163253784, "learning_rate": 8.615555555555555e-06, "loss": 0.1191, "step": 1125},
    {"epoch": 0.23, "grad_norm": 1.3462347984313965, "learning_rate": 8.560000000000001e-06, "loss": 0.1214, "step": 1150},
    {"epoch": 0.235, "grad_norm": 1.7791441679000854, "learning_rate": 8.504444444444445e-06, "loss": 0.1213, "step": 1175},
    {"epoch": 0.24, "grad_norm": 3.0184803009033203, "learning_rate": 8.448888888888889e-06, "loss": 0.3185, "step": 1200},
    {"epoch": 0.245, "grad_norm": 2.1637368202209473, "learning_rate": 8.393333333333335e-06, "loss": 0.2747, "step": 1225},
    {"epoch": 0.25, "grad_norm": 3.534830093383789, "learning_rate": 8.337777777777777e-06, "loss": 0.1535, "step": 1250},
    {"epoch": 0.255, "grad_norm": 3.3045480251312256, "learning_rate": 8.282222222222223e-06, "loss": 0.6384, "step": 1275},
    {"epoch": 0.26, "grad_norm": 2.566434383392334, "learning_rate": 8.226666666666667e-06, "loss": 0.4608, "step": 1300},
    {"epoch": 0.265, "grad_norm": 3.436089038848877, "learning_rate": 8.171111111111113e-06, "loss": 0.3286, "step": 1325},
    {"epoch": 0.27, "grad_norm": 2.8783295154571533, "learning_rate": 8.115555555555557e-06, "loss": 0.3508, "step": 1350},
    {"epoch": 0.275, "grad_norm": 3.244107723236084, "learning_rate": 8.06e-06, "loss": 0.3174, "step": 1375},
    {"epoch": 0.28, "grad_norm": 2.4489357471466064, "learning_rate": 8.004444444444445e-06, "loss": 0.2287, "step": 1400},
    {"epoch": 0.285, "grad_norm": 2.270322561264038, "learning_rate": 7.948888888888889e-06, "loss": 0.1828, "step": 1425},
    {"epoch": 0.29, "grad_norm": 2.105621814727783, "learning_rate": 7.893333333333335e-06, "loss": 0.1336, "step": 1450},
    {"epoch": 0.295, "grad_norm": 1.8718715906143188, "learning_rate": 7.837777777777779e-06, "loss": 0.1358, "step": 1475},
    {"epoch": 0.3, "grad_norm": 1.595679759979248, "learning_rate": 7.782222222222223e-06, "loss": 0.1244, "step": 1500},
    {"epoch": 0.305, "grad_norm": 1.979271411895752, "learning_rate": 7.726666666666667e-06, "loss": 0.1222, "step": 1525},
    {"epoch": 0.31, "grad_norm": 2.024198532104492, "learning_rate": 7.67111111111111e-06, "loss": 0.1274, "step": 1550},
    {"epoch": 0.315, "grad_norm": 1.9602785110473633, "learning_rate": 7.6155555555555564e-06, "loss": 0.1243, "step": 1575},
    {"epoch": 0.32, "grad_norm": 2.508364677429199, "learning_rate": 7.5600000000000005e-06, "loss": 0.1896, "step": 1600},
    {"epoch": 0.325, "grad_norm": 2.0765221118927, "learning_rate": 7.504444444444445e-06, "loss": 0.1707, "step": 1625},
    {"epoch": 0.33, "grad_norm": 2.030231475830078, "learning_rate": 7.44888888888889e-06, "loss": 0.1341, "step": 1650},
    {"epoch": 0.335, "grad_norm": 2.9829025268554688, "learning_rate": 7.393333333333333e-06, "loss": 0.1305, "step": 1675},
    {"epoch": 0.34, "grad_norm": 2.1654741764068604, "learning_rate": 7.337777777777778e-06, "loss": 0.122, "step": 1700},
    {"epoch": 0.345, "grad_norm": 2.106933832168579, "learning_rate": 7.282222222222222e-06, "loss": 0.1427, "step": 1725},
    {"epoch": 0.35, "grad_norm": 3.380559206008911, "learning_rate": 7.226666666666667e-06, "loss": 0.3227, "step": 1750},
    {"epoch": 0.355, "grad_norm": 2.084108829498291, "learning_rate": 7.171111111111112e-06, "loss": 0.194, "step": 1775},
    {"epoch": 0.36, "grad_norm": 2.1483328342437744, "learning_rate": 7.115555555555557e-06, "loss": 0.1193, "step": 1800},
    {"epoch": 0.365, "grad_norm": 1.6093554496765137, "learning_rate": 7.06e-06, "loss": 0.0958, "step": 1825},
    {"epoch": 0.37, "grad_norm": 1.4673306941986084, "learning_rate": 7.004444444444445e-06, "loss": 0.0896, "step": 1850},
    {"epoch": 0.375, "grad_norm": 1.4364008903503418, "learning_rate": 6.948888888888889e-06, "loss": 0.0897, "step": 1875},
    {"epoch": 0.38, "grad_norm": 1.4021852016448975, "learning_rate": 6.893333333333334e-06, "loss": 0.0772, "step": 1900},
    {"epoch": 0.385, "grad_norm": 1.5660924911499023, "learning_rate": 6.837777777777779e-06, "loss": 0.0773, "step": 1925},
    {"epoch": 0.39, "grad_norm": 1.9443961381912231, "learning_rate": 6.782222222222222e-06, "loss": 0.1027, "step": 1950},
    {"epoch": 0.395, "grad_norm": 2.002857208251953, "learning_rate": 6.726666666666667e-06, "loss": 0.1945, "step": 1975},
    {"epoch": 0.4, "grad_norm": 2.1631336212158203, "learning_rate": 6.671111111111112e-06, "loss": 0.1986, "step": 2000},
    {"epoch": 0.4, "eval_loss": 0.20620137453079224, "eval_runtime": 384.1444, "eval_samples_per_second": 24.644, "eval_steps_per_second": 3.082, "eval_wer": 11.570062981553118, "step": 2000},
    {"epoch": 0.405, "grad_norm": 1.9269295930862427, "learning_rate": 6.615555555555556e-06, "loss": 0.1477, "step": 2025},
    {"epoch": 0.41, "grad_norm": 1.8260035514831543, "learning_rate": 6.560000000000001e-06, "loss": 0.0935, "step": 2050},
    {"epoch": 0.415, "grad_norm": 1.3721944093704224, "learning_rate": 6.504444444444446e-06, "loss": 0.0801, "step": 2075},
    {"epoch": 0.42, "grad_norm": 1.8245339393615723, "learning_rate": 6.448888888888889e-06, "loss": 0.1047, "step": 2100},
    {"epoch": 0.425, "grad_norm": 1.5538787841796875, "learning_rate": 6.393333333333334e-06, "loss": 0.0879, "step": 2125},
    {"epoch": 0.43, "grad_norm": 1.519700527191162, "learning_rate": 6.3377777777777786e-06, "loss": 0.0813, "step": 2150},
    {"epoch": 0.435, "grad_norm": 3.4600226879119873, "learning_rate": 6.282222222222223e-06, "loss": 0.1835, "step": 2175},
    {"epoch": 0.44, "grad_norm": 3.3525190353393555, "learning_rate": 6.2266666666666675e-06, "loss": 0.6174, "step": 2200},
    {"epoch": 0.445, "grad_norm": 2.3610615730285645, "learning_rate": 6.171111111111112e-06, "loss": 0.6218, "step": 2225},
    {"epoch": 0.45, "grad_norm": 1.5530322790145874, "learning_rate": 6.1155555555555555e-06, "loss": 0.2095, "step": 2250},
    {"epoch": 0.455, "grad_norm": 1.4927952289581299, "learning_rate": 6.0600000000000004e-06, "loss": 0.0967, "step": 2275},
    {"epoch": 0.46, "grad_norm": 1.6570786237716675, "learning_rate": 6.004444444444445e-06, "loss": 0.0895, "step": 2300},
    {"epoch": 0.465, "grad_norm": 1.5751352310180664, "learning_rate": 5.948888888888889e-06, "loss": 0.0878, "step": 2325},
    {"epoch": 1.001, "grad_norm": 2.1557564735412598, "learning_rate": 5.893333333333334e-06, "loss": 0.095, "step": 2350},
    {"epoch": 1.006, "grad_norm": 2.489004611968994, "learning_rate": 5.837777777777777e-06, "loss": 0.1279, "step": 2375},
    {"epoch": 1.011, "grad_norm": 3.284184217453003, "learning_rate": 5.782222222222222e-06, "loss": 0.1914, "step": 2400},
    {"epoch": 1.016, "grad_norm": 3.20249080657959, "learning_rate": 5.726666666666667e-06, "loss": 0.526, "step": 2425},
    {"epoch": 1.021, "grad_norm": 2.9400064945220947, "learning_rate": 5.671111111111112e-06, "loss": 0.2781, "step": 2450},
    {"epoch": 1.026, "grad_norm": 1.9436590671539307, "learning_rate": 5.615555555555556e-06, "loss": 0.1756, "step": 2475},
    {"epoch": 1.031, "grad_norm": 2.086899757385254, "learning_rate": 5.560000000000001e-06, "loss": 0.1469, "step": 2500},
    {"epoch": 1.036, "grad_norm": 1.6339154243469238, "learning_rate": 5.504444444444444e-06, "loss": 0.1348, "step": 2525},
    {"epoch": 1.041, "grad_norm": 2.1945927143096924, "learning_rate": 5.448888888888889e-06, "loss": 0.112, "step": 2550},
    {"epoch": 1.046, "grad_norm": 1.639339804649353, "learning_rate": 5.393333333333334e-06, "loss": 0.1134, "step": 2575},
    {"epoch": 1.051, "grad_norm": 1.7829625606536865, "learning_rate": 5.337777777777779e-06, "loss": 0.098, "step": 2600},
    {"epoch": 1.056, "grad_norm": 1.5553560256958008, "learning_rate": 5.282222222222223e-06, "loss": 0.0706, "step": 2625},
    {"epoch": 1.061, "grad_norm": 2.2982029914855957, "learning_rate": 5.226666666666667e-06, "loss": 0.2427, "step": 2650},
    {"epoch": 1.066, "grad_norm": 1.794986367225647, "learning_rate": 5.171111111111111e-06, "loss": 0.116, "step": 2675},
    {"epoch": 1.071, "grad_norm": 1.2313064336776733, "learning_rate": 5.115555555555556e-06, "loss": 0.074, "step": 2700},
    {"epoch": 1.076, "grad_norm": 1.4959907531738281, "learning_rate": 5.060000000000001e-06, "loss": 0.0758, "step": 2725},
    {"epoch": 1.081, "grad_norm": 1.216587781906128, "learning_rate": 5.004444444444445e-06, "loss": 0.0787, "step": 2750},
    {"epoch": 1.086, "grad_norm": 1.8736680746078491, "learning_rate": 4.94888888888889e-06, "loss": 0.1223, "step": 2775},
    {"epoch": 1.091, "grad_norm": 1.2765681743621826, "learning_rate": 4.893333333333334e-06, "loss": 0.1025, "step": 2800},
    {"epoch": 1.096, "grad_norm": 1.614762544631958, "learning_rate": 4.837777777777778e-06, "loss": 0.089, "step": 2825},
    {"epoch": 1.101, "grad_norm": 1.6263494491577148, "learning_rate": 4.7822222222222226e-06, "loss": 0.1046, "step": 2850},
    {"epoch": 1.106, "grad_norm": 1.6685870885849, "learning_rate": 4.7266666666666674e-06, "loss": 0.0845, "step": 2875},
    {"epoch": 1.111, "grad_norm": 1.2902885675430298, "learning_rate": 4.6711111111111115e-06, "loss": 0.0806, "step": 2900},
    {"epoch": 1.116, "grad_norm": 1.8994227647781372, "learning_rate": 4.6155555555555555e-06, "loss": 0.0767, "step": 2925},
    {"epoch": 1.121, "grad_norm": 1.3723390102386475, "learning_rate": 4.56e-06, "loss": 0.0716, "step": 2950},
    {"epoch": 1.126, "grad_norm": 1.6283633708953857, "learning_rate": 4.504444444444444e-06, "loss": 0.0731, "step": 2975},
    {"epoch": 1.131, "grad_norm": 1.3962900638580322, "learning_rate": 4.448888888888889e-06, "loss": 0.0942, "step": 3000},
    {"epoch": 1.131, "eval_loss": 0.19787383079528809, "eval_runtime": 384.7545, "eval_samples_per_second": 24.605, "eval_steps_per_second": 3.077, "eval_wer": 11.015377729467055, "step": 3000},
    {"epoch": 1.1360000000000001, "grad_norm": 1.321205973625183, "learning_rate": 4.393333333333334e-06, "loss": 0.0731, "step": 3025},
    {"epoch": 1.141, "grad_norm": 1.2057939767837524, "learning_rate": 4.337777777777778e-06, "loss": 0.0646, "step": 3050},
    {"epoch": 1.146, "grad_norm": 1.0545603036880493, "learning_rate": 4.282222222222222e-06, "loss": 0.0628, "step": 3075},
    {"epoch": 1.151, "grad_norm": 1.363511562347412, "learning_rate": 4.226666666666667e-06, "loss": 0.0614, "step": 3100},
    {"epoch": 1.156, "grad_norm": 1.1930261850357056, "learning_rate": 4.171111111111111e-06, "loss": 0.0634, "step": 3125},
    {"epoch": 1.161, "grad_norm": 3.0981078147888184, "learning_rate": 4.115555555555556e-06, "loss": 0.2114, "step": 3150},
    {"epoch": 1.166, "grad_norm": 1.6411216259002686, "learning_rate": 4.060000000000001e-06, "loss": 0.1761, "step": 3175},
    {"epoch": 1.171, "grad_norm": 1.1803615093231201, "learning_rate": 4.004444444444445e-06, "loss": 0.0729, "step": 3200},
    {"epoch": 1.176, "grad_norm": 3.052395820617676, "learning_rate": 3.948888888888889e-06, "loss": 0.3801, "step": 3225},
    {"epoch": 1.181, "grad_norm": 2.7270925045013428, "learning_rate": 3.893333333333333e-06, "loss": 0.3596, "step": 3250},
    {"epoch": 1.186, "grad_norm": 1.7751537561416626, "learning_rate": 3.837777777777778e-06, "loss": 0.2001, "step": 3275},
    {"epoch": 1.191, "grad_norm": 1.7908082008361816, "learning_rate": 3.782222222222223e-06, "loss": 0.1933, "step": 3300},
    {"epoch": 1.196, "grad_norm": 1.8746870756149292, "learning_rate": 3.726666666666667e-06, "loss": 0.2034, "step": 3325},
    {"epoch": 1.201, "grad_norm": 1.8763881921768188, "learning_rate": 3.6711111111111113e-06, "loss": 0.1608, "step": 3350},
    {"epoch": 1.206, "grad_norm": 1.5297502279281616, "learning_rate": 3.615555555555556e-06, "loss": 0.1099, "step": 3375},
    {"epoch": 1.211, "grad_norm": 1.4193453788757324, "learning_rate": 3.5600000000000002e-06, "loss": 0.079, "step": 3400},
    {"epoch": 1.216, "grad_norm": 1.5194439888000488, "learning_rate": 3.5044444444444447e-06, "loss": 0.0652, "step": 3425},
    {"epoch": 1.221, "grad_norm": 1.3446123600006104, "learning_rate": 3.4488888888888896e-06, "loss": 0.0623, "step": 3450},
    {"epoch": 1.226, "grad_norm": 1.293591856956482, "learning_rate": 3.3933333333333336e-06, "loss": 0.063, "step": 3475},
    {"epoch": 1.231, "grad_norm": 1.3113939762115479, "learning_rate": 3.337777777777778e-06, "loss": 0.0691, "step": 3500},
    {"epoch": 1.236, "grad_norm": 1.8869807720184326, "learning_rate": 3.282222222222223e-06, "loss": 0.0697, "step": 3525},
    {"epoch": 1.241, "grad_norm": 1.311045527458191, "learning_rate": 3.226666666666667e-06, "loss": 0.1023, "step": 3550},
    {"epoch": 1.246, "grad_norm": 1.4343574047088623, "learning_rate": 3.1711111111111114e-06, "loss": 0.0833, "step": 3575},
    {"epoch": 1.251, "grad_norm": 1.6197088956832886, "learning_rate": 3.1155555555555555e-06, "loss": 0.0812, "step": 3600},
    {"epoch": 1.256, "grad_norm": 1.4370137453079224, "learning_rate": 3.0600000000000003e-06, "loss": 0.0666, "step": 3625},
    {"epoch": 1.2610000000000001, "grad_norm": 1.3375777006149292, "learning_rate": 3.004444444444445e-06, "loss": 0.058, "step": 3650},
    {"epoch": 1.266, "grad_norm": 1.6165860891342163, "learning_rate": 2.948888888888889e-06, "loss": 0.0736, "step": 3675},
    {"epoch": 1.271, "grad_norm": 2.2871458530426025, "learning_rate": 2.8933333333333337e-06, "loss": 0.0943, "step": 3700},
    {"epoch": 1.276, "grad_norm": 1.4543273448944092, "learning_rate": 2.837777777777778e-06, "loss": 0.0904, "step": 3725},
    {"epoch": 1.2810000000000001, "grad_norm": 1.64260995388031, "learning_rate": 2.7822222222222222e-06, "loss": 0.0736, "step": 3750},
    {"epoch": 1.286, "grad_norm": 1.1954628229141235, "learning_rate": 2.726666666666667e-06, "loss": 0.0634, "step": 3775},
    {"epoch": 1.291, "grad_norm": 1.245037317276001, "learning_rate": 2.6711111111111116e-06, "loss": 0.0759, "step": 3800},
    {"epoch": 1.296, "grad_norm": 1.1573712825775146, "learning_rate": 2.6155555555555556e-06, "loss": 0.0695, "step": 3825},
    {"epoch": 1.301, "grad_norm": 1.0521385669708252, "learning_rate": 2.56e-06, "loss": 0.058, "step": 3850},
    {"epoch": 1.306, "grad_norm": 1.6519237756729126, "learning_rate": 2.504444444444445e-06, "loss": 0.0706, "step": 3875},
    {"epoch": 1.311, "grad_norm": 1.3316805362701416, "learning_rate": 2.448888888888889e-06, "loss": 0.0681, "step": 3900},
    {"epoch": 1.316, "grad_norm": 1.8995634317398071, "learning_rate": 2.3933333333333334e-06, "loss": 0.1458, "step": 3925},
    {"epoch": 1.321, "grad_norm": 1.9890100955963135, "learning_rate": 2.337777777777778e-06, "loss": 0.1334, "step": 3950},
    {"epoch": 1.326, "grad_norm": 1.2592449188232422, "learning_rate": 2.2822222222222223e-06, "loss": 0.1247, "step": 3975},
    {"epoch": 1.331, "grad_norm": 1.3058019876480103, "learning_rate": 2.226666666666667e-06, "loss": 0.0577, "step": 4000},
    {"epoch": 1.331, "eval_loss": 0.2000385820865631, "eval_runtime": 386.3157, "eval_samples_per_second": 24.506, "eval_steps_per_second": 3.065, "eval_wer": 10.634930784232232, "step": 4000},
    {"epoch": 1.336, "grad_norm": 1.0941780805587769, "learning_rate": 2.1711111111111113e-06, "loss": 0.0498, "step": 4025},
    {"epoch": 1.341, "grad_norm": 1.407287836074829, "learning_rate": 2.1155555555555557e-06, "loss": 0.0479, "step": 4050},
    {"epoch": 1.346, "grad_norm": 0.8003798723220825, "learning_rate": 2.06e-06, "loss": 0.0518, "step": 4075},
    {"epoch": 1.351, "grad_norm": 1.1387590169906616, "learning_rate": 2.0044444444444446e-06, "loss": 0.0453, "step": 4100},
    {"epoch": 1.3559999999999999, "grad_norm": 2.531583786010742, "learning_rate": 1.948888888888889e-06, "loss": 0.1058, "step": 4125},
    {"epoch": 1.361, "grad_norm": 2.965607166290283, "learning_rate": 1.8933333333333333e-06, "loss": 0.4739, "step": 4150},
    {"epoch": 1.366, "grad_norm": 3.0888776779174805, "learning_rate": 1.837777777777778e-06, "loss": 0.5685, "step": 4175},
    {"epoch": 1.371, "grad_norm": 1.201741099357605, "learning_rate": 1.7822222222222225e-06, "loss": 0.203, "step": 4200},
    {"epoch": 1.376, "grad_norm": 1.1447727680206299, "learning_rate": 1.7266666666666667e-06, "loss": 0.0638, "step": 4225},
    {"epoch": 1.381, "grad_norm": 0.9738747477531433, "learning_rate": 1.6711111111111112e-06, "loss": 0.0554, "step": 4250},
    {"epoch": 1.3860000000000001, "grad_norm": 0.9229967594146729, "learning_rate": 1.6155555555555559e-06, "loss": 0.0513, "step": 4275},
    {"epoch": 1.391, "grad_norm": 1.1114579439163208, "learning_rate": 1.56e-06, "loss": 0.0515, "step": 4300},
    {"epoch": 1.396, "grad_norm": 2.101980447769165, "learning_rate": 1.5044444444444446e-06, "loss": 0.078, "step": 4325},
    {"epoch": 1.401, "grad_norm": 1.728270173072815, "learning_rate": 1.4488888888888892e-06, "loss": 0.1397, "step": 4350},
    {"epoch": 1.4060000000000001, "grad_norm": 1.6103638410568237, "learning_rate": 1.3933333333333335e-06, "loss": 0.0851, "step": 4375},
    {"epoch": 1.411, "grad_norm": 1.334058403968811, "learning_rate": 1.337777777777778e-06, "loss": 0.0715, "step": 4400},
    {"epoch": 1.416, "grad_norm": 1.138947606086731, "learning_rate": 1.2822222222222222e-06, "loss": 0.0652, "step": 4425},
    {"epoch": 1.421, "grad_norm": 1.4695849418640137, "learning_rate": 1.2266666666666666e-06, "loss": 0.0723, "step": 4450},
    {"epoch": 1.426, "grad_norm": 4.378330707550049, "learning_rate": 1.171111111111111e-06, "loss": 0.1358, "step": 4475},
    {"epoch": 1.431, "grad_norm": 1.7877916097640991, "learning_rate": 1.1155555555555558e-06, "loss": 0.2452, "step": 4500},
    {"epoch": 1.436, "grad_norm": 1.5879491567611694, "learning_rate": 1.06e-06, "loss": 0.1187, "step": 4525},
    {"epoch": 1.441, "grad_norm": 1.056880235671997, "learning_rate": 1.0044444444444445e-06, "loss": 0.0584, "step": 4550},
    {"epoch": 1.446, "grad_norm": 0.8786937594413757, "learning_rate": 9.488888888888889e-07, "loss": 0.0487, "step": 4575},
    {"epoch": 1.451, "grad_norm": 0.9723278880119324, "learning_rate": 8.933333333333334e-07, "loss": 0.0413, "step": 4600},
    {"epoch": 1.456, "grad_norm": 1.1325929164886475, "learning_rate": 8.37777777777778e-07, "loss": 0.0444, "step": 4625},
    {"epoch": 1.461, "grad_norm": 1.0554845333099365, "learning_rate": 7.822222222222223e-07, "loss": 0.0408, "step": 4650},
    {"epoch": 1.466, "grad_norm": 1.6001170873641968, "learning_rate": 7.266666666666668e-07, "loss": 0.0486, "step": 4675},
    {"epoch": 2.002, "grad_norm": 1.434520959854126, "learning_rate": 6.711111111111111e-07, "loss": 0.0605, "step": 4700},
    {"epoch": 2.007, "grad_norm": 2.3628861904144287, "learning_rate": 6.155555555555556e-07, "loss": 0.0848, "step": 4725},
    {"epoch": 2.012, "grad_norm": 1.8358659744262695, "learning_rate": 5.6e-07, "loss": 0.1237, "step": 4750},
    {"epoch": 2.017, "grad_norm": 1.7591222524642944, "learning_rate": 5.044444444444445e-07, "loss": 0.0846, "step": 4775},
    {"epoch": 2.022, "grad_norm": 1.3934890031814575, "learning_rate": 4.488888888888889e-07, "loss": 0.0855, "step": 4800},
    {"epoch": 2.027, "grad_norm": 1.0459132194519043, "learning_rate": 3.9333333333333336e-07, "loss": 0.0702, "step": 4825},
    {"epoch": 2.032, "grad_norm": 1.2503104209899902, "learning_rate": 3.3777777777777777e-07, "loss": 0.0637, "step": 4850},
    {"epoch": 2.037, "grad_norm": 1.1123186349868774, "learning_rate": 2.822222222222222e-07, "loss": 0.0555, "step": 4875},
    {"epoch": 2.042, "grad_norm": 1.0896210670471191, "learning_rate": 2.266666666666667e-07, "loss": 0.0538, "step": 4900},
    {"epoch": 2.047, "grad_norm": 1.138146162033081, "learning_rate": 1.7111111111111114e-07, "loss": 0.0486, "step": 4925},
    {"epoch": 2.052, "grad_norm": 1.1482701301574707, "learning_rate": 1.1555555555555556e-07, "loss": 0.0668, "step": 4950},
    {"epoch": 2.057, "grad_norm": 0.9316118955612183, "learning_rate": 6.000000000000001e-08, "loss": 0.0581, "step": 4975},
    {"epoch": 2.062, "grad_norm": 1.259885549545288, "learning_rate": 4.444444444444445e-09, "loss": 0.0516, "step": 5000},
    {"epoch": 2.062, "eval_loss": 0.20070870220661163, "eval_runtime": 383.0104, "eval_samples_per_second": 24.717, "eval_steps_per_second": 3.091, "eval_wer": 10.670098148917804, "step": 5000},
    {"epoch": 2.062, "step": 5000, "total_flos": 9.231442936676352e+19, "train_loss": 0.16698810555934906, "train_runtime": 16450.2071, "train_samples_per_second": 19.453, "train_steps_per_second": 0.304}
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.231442936676352e+19,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}