|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.2776138970829236, |
|
"eval_steps": 1000, |
|
"global_step": 5000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01638806948541462, |
|
"grad_norm": 16.339120864868164, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 3.195, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03277613897082924, |
|
"grad_norm": 11.416489601135254, |
|
"learning_rate": 9.800000000000001e-07, |
|
"loss": 2.5931, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.049164208456243856, |
|
"grad_norm": 10.20141887664795, |
|
"learning_rate": 1.48e-06, |
|
"loss": 2.0124, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06555227794165848, |
|
"grad_norm": 10.408208847045898, |
|
"learning_rate": 1.98e-06, |
|
"loss": 1.6783, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0819403474270731, |
|
"grad_norm": 10.501668930053711, |
|
"learning_rate": 2.4800000000000004e-06, |
|
"loss": 1.6138, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09832841691248771, |
|
"grad_norm": 9.28870964050293, |
|
"learning_rate": 2.9800000000000003e-06, |
|
"loss": 1.5721, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11471648639790233, |
|
"grad_norm": 10.96129322052002, |
|
"learning_rate": 3.48e-06, |
|
"loss": 1.4486, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13110455588331696, |
|
"grad_norm": 9.153674125671387, |
|
"learning_rate": 3.980000000000001e-06, |
|
"loss": 1.3586, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14749262536873156, |
|
"grad_norm": 9.216712951660156, |
|
"learning_rate": 4.48e-06, |
|
"loss": 1.3824, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1638806948541462, |
|
"grad_norm": 9.344610214233398, |
|
"learning_rate": 4.980000000000001e-06, |
|
"loss": 1.3523, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1802687643395608, |
|
"grad_norm": 9.565411567687988, |
|
"learning_rate": 5.480000000000001e-06, |
|
"loss": 1.3112, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.19665683382497542, |
|
"grad_norm": 9.21192455291748, |
|
"learning_rate": 5.98e-06, |
|
"loss": 1.2953, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21304490331039003, |
|
"grad_norm": 9.407176971435547, |
|
"learning_rate": 6.480000000000001e-06, |
|
"loss": 1.297, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.22943297279580466, |
|
"grad_norm": 11.401097297668457, |
|
"learning_rate": 6.98e-06, |
|
"loss": 1.2525, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.24582104228121926, |
|
"grad_norm": 7.8178181648254395, |
|
"learning_rate": 7.48e-06, |
|
"loss": 1.2158, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2622091117666339, |
|
"grad_norm": 10.073293685913086, |
|
"learning_rate": 7.980000000000002e-06, |
|
"loss": 1.2654, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.2785971812520485, |
|
"grad_norm": 9.110590934753418, |
|
"learning_rate": 8.48e-06, |
|
"loss": 1.2629, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2949852507374631, |
|
"grad_norm": 9.107154846191406, |
|
"learning_rate": 8.98e-06, |
|
"loss": 1.2484, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.3113733202228777, |
|
"grad_norm": 8.568262100219727, |
|
"learning_rate": 9.48e-06, |
|
"loss": 1.2081, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3277613897082924, |
|
"grad_norm": 7.736518383026123, |
|
"learning_rate": 9.980000000000001e-06, |
|
"loss": 1.234, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.344149459193707, |
|
"grad_norm": 8.957286834716797, |
|
"learning_rate": 9.946666666666667e-06, |
|
"loss": 1.1519, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3605375286791216, |
|
"grad_norm": 8.355584144592285, |
|
"learning_rate": 9.891111111111113e-06, |
|
"loss": 1.1517, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3769255981645362, |
|
"grad_norm": 6.338685512542725, |
|
"learning_rate": 9.835555555555556e-06, |
|
"loss": 1.1563, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.39331366764995085, |
|
"grad_norm": 8.323567390441895, |
|
"learning_rate": 9.780000000000001e-06, |
|
"loss": 1.1948, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.40970173713536545, |
|
"grad_norm": 8.510113716125488, |
|
"learning_rate": 9.724444444444445e-06, |
|
"loss": 1.1357, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.42608980662078005, |
|
"grad_norm": 8.770384788513184, |
|
"learning_rate": 9.66888888888889e-06, |
|
"loss": 1.1083, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.4424778761061947, |
|
"grad_norm": 6.591556072235107, |
|
"learning_rate": 9.613333333333335e-06, |
|
"loss": 1.1361, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.4588659455916093, |
|
"grad_norm": 9.191658973693848, |
|
"learning_rate": 9.557777777777777e-06, |
|
"loss": 1.1053, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4752540150770239, |
|
"grad_norm": 7.101362705230713, |
|
"learning_rate": 9.502222222222223e-06, |
|
"loss": 1.1253, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.4916420845624385, |
|
"grad_norm": 7.415987491607666, |
|
"learning_rate": 9.446666666666667e-06, |
|
"loss": 1.0605, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5080301540478531, |
|
"grad_norm": 7.0226616859436035, |
|
"learning_rate": 9.391111111111111e-06, |
|
"loss": 1.0165, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5244182235332678, |
|
"grad_norm": 8.283411026000977, |
|
"learning_rate": 9.335555555555557e-06, |
|
"loss": 1.0527, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5408062930186824, |
|
"grad_norm": 7.523382186889648, |
|
"learning_rate": 9.280000000000001e-06, |
|
"loss": 1.0575, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.557194362504097, |
|
"grad_norm": 8.657249450683594, |
|
"learning_rate": 9.224444444444445e-06, |
|
"loss": 1.0674, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5735824319895116, |
|
"grad_norm": 6.097987174987793, |
|
"learning_rate": 9.168888888888889e-06, |
|
"loss": 0.9898, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.5899705014749262, |
|
"grad_norm": 8.815755844116211, |
|
"learning_rate": 9.113333333333335e-06, |
|
"loss": 1.0372, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6063585709603408, |
|
"grad_norm": 6.628593921661377, |
|
"learning_rate": 9.057777777777779e-06, |
|
"loss": 1.052, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.6227466404457555, |
|
"grad_norm": 7.880568027496338, |
|
"learning_rate": 9.002222222222223e-06, |
|
"loss": 1.0182, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.63913470993117, |
|
"grad_norm": 7.334752559661865, |
|
"learning_rate": 8.946666666666669e-06, |
|
"loss": 1.0596, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6555227794165848, |
|
"grad_norm": 6.15729284286499, |
|
"learning_rate": 8.891111111111111e-06, |
|
"loss": 0.9964, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6555227794165848, |
|
"eval_loss": 0.4889208674430847, |
|
"eval_runtime": 1659.2481, |
|
"eval_samples_per_second": 2.351, |
|
"eval_steps_per_second": 0.147, |
|
"eval_wer": 0.36632965537354784, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6719108489019994, |
|
"grad_norm": 7.4670538902282715, |
|
"learning_rate": 8.835555555555557e-06, |
|
"loss": 1.0353, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.688298918387414, |
|
"grad_norm": 6.965098857879639, |
|
"learning_rate": 8.78e-06, |
|
"loss": 0.9819, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7046869878728286, |
|
"grad_norm": 6.953728675842285, |
|
"learning_rate": 8.724444444444445e-06, |
|
"loss": 0.9931, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.7210750573582432, |
|
"grad_norm": 7.410843849182129, |
|
"learning_rate": 8.66888888888889e-06, |
|
"loss": 1.0368, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7374631268436578, |
|
"grad_norm": 6.275593280792236, |
|
"learning_rate": 8.613333333333333e-06, |
|
"loss": 0.974, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.7538511963290724, |
|
"grad_norm": 5.739853382110596, |
|
"learning_rate": 8.557777777777778e-06, |
|
"loss": 0.9568, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7702392658144871, |
|
"grad_norm": 7.921228408813477, |
|
"learning_rate": 8.502222222222223e-06, |
|
"loss": 0.9109, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.7866273352999017, |
|
"grad_norm": 6.905638694763184, |
|
"learning_rate": 8.446666666666668e-06, |
|
"loss": 0.9741, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8030154047853163, |
|
"grad_norm": 7.002967834472656, |
|
"learning_rate": 8.391111111111112e-06, |
|
"loss": 0.9512, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.8194034742707309, |
|
"grad_norm": 8.190713882446289, |
|
"learning_rate": 8.335555555555556e-06, |
|
"loss": 0.9474, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.8357915437561455, |
|
"grad_norm": 6.305277347564697, |
|
"learning_rate": 8.28e-06, |
|
"loss": 0.9251, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.8521796132415601, |
|
"grad_norm": 6.228824615478516, |
|
"learning_rate": 8.224444444444444e-06, |
|
"loss": 0.9186, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.8685676827269747, |
|
"grad_norm": 7.2379679679870605, |
|
"learning_rate": 8.16888888888889e-06, |
|
"loss": 0.9062, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.8849557522123894, |
|
"grad_norm": 5.009719371795654, |
|
"learning_rate": 8.113333333333334e-06, |
|
"loss": 0.9369, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.901343821697804, |
|
"grad_norm": 6.123809814453125, |
|
"learning_rate": 8.057777777777778e-06, |
|
"loss": 0.9473, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.9177318911832186, |
|
"grad_norm": 8.46691608428955, |
|
"learning_rate": 8.002222222222222e-06, |
|
"loss": 0.9325, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.9341199606686332, |
|
"grad_norm": 6.441035747528076, |
|
"learning_rate": 7.946666666666666e-06, |
|
"loss": 0.9194, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.9505080301540478, |
|
"grad_norm": 6.163531303405762, |
|
"learning_rate": 7.891111111111112e-06, |
|
"loss": 0.8699, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.9668960996394624, |
|
"grad_norm": 6.857112407684326, |
|
"learning_rate": 7.835555555555556e-06, |
|
"loss": 0.8956, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.983284169124877, |
|
"grad_norm": 6.297347068786621, |
|
"learning_rate": 7.78e-06, |
|
"loss": 0.9163, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.9996722386102918, |
|
"grad_norm": 6.199060440063477, |
|
"learning_rate": 7.724444444444446e-06, |
|
"loss": 0.8492, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.0160603080957062, |
|
"grad_norm": 5.396669864654541, |
|
"learning_rate": 7.66888888888889e-06, |
|
"loss": 0.669, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.0324483775811208, |
|
"grad_norm": 4.675485134124756, |
|
"learning_rate": 7.613333333333334e-06, |
|
"loss": 0.6436, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.0488364470665354, |
|
"grad_norm": 4.590639591217041, |
|
"learning_rate": 7.557777777777779e-06, |
|
"loss": 0.656, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.0652245165519503, |
|
"grad_norm": 5.911344528198242, |
|
"learning_rate": 7.502222222222223e-06, |
|
"loss": 0.6784, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.0816125860373649, |
|
"grad_norm": 5.509343147277832, |
|
"learning_rate": 7.446666666666668e-06, |
|
"loss": 0.694, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.0980006555227795, |
|
"grad_norm": 5.67479944229126, |
|
"learning_rate": 7.3911111111111125e-06, |
|
"loss": 0.6891, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.114388725008194, |
|
"grad_norm": 6.981929302215576, |
|
"learning_rate": 7.335555555555556e-06, |
|
"loss": 0.6854, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.1307767944936087, |
|
"grad_norm": 5.943860054016113, |
|
"learning_rate": 7.280000000000001e-06, |
|
"loss": 0.6669, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.1471648639790233, |
|
"grad_norm": 6.462680816650391, |
|
"learning_rate": 7.224444444444445e-06, |
|
"loss": 0.6775, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.1635529334644379, |
|
"grad_norm": 5.6128621101379395, |
|
"learning_rate": 7.1688888888888895e-06, |
|
"loss": 0.6777, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.1799410029498525, |
|
"grad_norm": 5.527766227722168, |
|
"learning_rate": 7.113333333333334e-06, |
|
"loss": 0.6723, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.196329072435267, |
|
"grad_norm": 5.95067024230957, |
|
"learning_rate": 7.057777777777778e-06, |
|
"loss": 0.6477, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.2127171419206817, |
|
"grad_norm": 6.410384654998779, |
|
"learning_rate": 7.0022222222222225e-06, |
|
"loss": 0.6597, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.2291052114060963, |
|
"grad_norm": 5.738908767700195, |
|
"learning_rate": 6.946666666666667e-06, |
|
"loss": 0.6651, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.245493280891511, |
|
"grad_norm": 6.21875, |
|
"learning_rate": 6.891111111111111e-06, |
|
"loss": 0.6647, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.2618813503769255, |
|
"grad_norm": 5.887701034545898, |
|
"learning_rate": 6.835555555555556e-06, |
|
"loss": 0.6963, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.2782694198623403, |
|
"grad_norm": 6.115960121154785, |
|
"learning_rate": 6.780000000000001e-06, |
|
"loss": 0.6637, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.2946574893477547, |
|
"grad_norm": 5.413832664489746, |
|
"learning_rate": 6.724444444444444e-06, |
|
"loss": 0.6849, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.3110455588331695, |
|
"grad_norm": 6.256831169128418, |
|
"learning_rate": 6.668888888888889e-06, |
|
"loss": 0.6606, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3110455588331695, |
|
"eval_loss": 0.42234352231025696, |
|
"eval_runtime": 1655.8109, |
|
"eval_samples_per_second": 2.356, |
|
"eval_steps_per_second": 0.147, |
|
"eval_wer": 0.3116949399698634, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3274336283185841, |
|
"grad_norm": 5.587663650512695, |
|
"learning_rate": 6.613333333333334e-06, |
|
"loss": 0.677, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.3438216978039987, |
|
"grad_norm": 5.522030830383301, |
|
"learning_rate": 6.557777777777778e-06, |
|
"loss": 0.6378, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.3602097672894133, |
|
"grad_norm": 5.9308953285217285, |
|
"learning_rate": 6.502222222222223e-06, |
|
"loss": 0.6468, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.376597836774828, |
|
"grad_norm": 6.899292469024658, |
|
"learning_rate": 6.446666666666668e-06, |
|
"loss": 0.6605, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.3929859062602425, |
|
"grad_norm": 5.966159343719482, |
|
"learning_rate": 6.391111111111111e-06, |
|
"loss": 0.6554, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.4093739757456571, |
|
"grad_norm": 5.988006114959717, |
|
"learning_rate": 6.335555555555556e-06, |
|
"loss": 0.6532, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.4257620452310718, |
|
"grad_norm": 5.561163902282715, |
|
"learning_rate": 6.280000000000001e-06, |
|
"loss": 0.6513, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.4421501147164864, |
|
"grad_norm": 5.755290508270264, |
|
"learning_rate": 6.224444444444445e-06, |
|
"loss": 0.6308, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.458538184201901, |
|
"grad_norm": 5.094759464263916, |
|
"learning_rate": 6.16888888888889e-06, |
|
"loss": 0.629, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.4749262536873156, |
|
"grad_norm": 5.547093391418457, |
|
"learning_rate": 6.113333333333333e-06, |
|
"loss": 0.6141, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.4913143231727304, |
|
"grad_norm": 5.79092264175415, |
|
"learning_rate": 6.057777777777778e-06, |
|
"loss": 0.6252, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.5077023926581448, |
|
"grad_norm": 5.5909271240234375, |
|
"learning_rate": 6.002222222222223e-06, |
|
"loss": 0.6533, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.5240904621435596, |
|
"grad_norm": 6.318563938140869, |
|
"learning_rate": 5.946666666666668e-06, |
|
"loss": 0.6474, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.540478531628974, |
|
"grad_norm": 6.217270851135254, |
|
"learning_rate": 5.891111111111112e-06, |
|
"loss": 0.6524, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.5568666011143888, |
|
"grad_norm": 6.372344970703125, |
|
"learning_rate": 5.8355555555555565e-06, |
|
"loss": 0.6401, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.5732546705998034, |
|
"grad_norm": 5.783897399902344, |
|
"learning_rate": 5.78e-06, |
|
"loss": 0.6285, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.589642740085218, |
|
"grad_norm": 5.293729305267334, |
|
"learning_rate": 5.724444444444445e-06, |
|
"loss": 0.6294, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.6060308095706326, |
|
"grad_norm": 5.974725246429443, |
|
"learning_rate": 5.6688888888888895e-06, |
|
"loss": 0.6373, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.6224188790560472, |
|
"grad_norm": 5.62349271774292, |
|
"learning_rate": 5.613333333333334e-06, |
|
"loss": 0.7165, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.6388069485414618, |
|
"grad_norm": 6.215331077575684, |
|
"learning_rate": 5.557777777777778e-06, |
|
"loss": 0.6337, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.6551950180268764, |
|
"grad_norm": 6.165288925170898, |
|
"learning_rate": 5.5022222222222224e-06, |
|
"loss": 0.6335, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.671583087512291, |
|
"grad_norm": 5.34564208984375, |
|
"learning_rate": 5.4466666666666665e-06, |
|
"loss": 0.6482, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.6879711569977056, |
|
"grad_norm": 6.15928316116333, |
|
"learning_rate": 5.391111111111111e-06, |
|
"loss": 0.631, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.7043592264831204, |
|
"grad_norm": 6.502092361450195, |
|
"learning_rate": 5.335555555555556e-06, |
|
"loss": 0.6239, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.7207472959685348, |
|
"grad_norm": 5.569223403930664, |
|
"learning_rate": 5.28e-06, |
|
"loss": 0.6203, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.7371353654539496, |
|
"grad_norm": 6.04316520690918, |
|
"learning_rate": 5.224444444444445e-06, |
|
"loss": 0.631, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.753523434939364, |
|
"grad_norm": 5.956827163696289, |
|
"learning_rate": 5.168888888888889e-06, |
|
"loss": 0.6421, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.7699115044247788, |
|
"grad_norm": 5.976485252380371, |
|
"learning_rate": 5.113333333333333e-06, |
|
"loss": 0.6041, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.7862995739101932, |
|
"grad_norm": 5.431225776672363, |
|
"learning_rate": 5.057777777777778e-06, |
|
"loss": 0.6521, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.802687643395608, |
|
"grad_norm": 5.089391708374023, |
|
"learning_rate": 5.002222222222223e-06, |
|
"loss": 0.608, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.8190757128810227, |
|
"grad_norm": 7.186110496520996, |
|
"learning_rate": 4.946666666666667e-06, |
|
"loss": 0.638, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.8354637823664373, |
|
"grad_norm": 5.833242416381836, |
|
"learning_rate": 4.891111111111111e-06, |
|
"loss": 0.6396, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.8518518518518519, |
|
"grad_norm": 4.990212440490723, |
|
"learning_rate": 4.835555555555556e-06, |
|
"loss": 0.6231, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.8682399213372665, |
|
"grad_norm": 5.418929100036621, |
|
"learning_rate": 4.78e-06, |
|
"loss": 0.6365, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.884627990822681, |
|
"grad_norm": 4.335667610168457, |
|
"learning_rate": 4.724444444444445e-06, |
|
"loss": 0.6426, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.9010160603080957, |
|
"grad_norm": 4.153316020965576, |
|
"learning_rate": 4.66888888888889e-06, |
|
"loss": 0.6197, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.9174041297935103, |
|
"grad_norm": 4.748465538024902, |
|
"learning_rate": 4.613333333333334e-06, |
|
"loss": 0.6105, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.9337921992789249, |
|
"grad_norm": 4.694342136383057, |
|
"learning_rate": 4.557777777777778e-06, |
|
"loss": 0.6158, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.9501802687643397, |
|
"grad_norm": 5.531050205230713, |
|
"learning_rate": 4.502222222222223e-06, |
|
"loss": 0.6089, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.966568338249754, |
|
"grad_norm": 5.279643535614014, |
|
"learning_rate": 4.446666666666667e-06, |
|
"loss": 0.6065, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.966568338249754, |
|
"eval_loss": 0.3859391212463379, |
|
"eval_runtime": 1670.3478, |
|
"eval_samples_per_second": 2.335, |
|
"eval_steps_per_second": 0.146, |
|
"eval_wer": 0.2873426335488261, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.982956407735169, |
|
"grad_norm": 4.542531967163086, |
|
"learning_rate": 4.391111111111112e-06, |
|
"loss": 0.6178, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.9993444772205833, |
|
"grad_norm": 5.153913974761963, |
|
"learning_rate": 4.3355555555555565e-06, |
|
"loss": 0.6124, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.015732546705998, |
|
"grad_norm": 4.631136417388916, |
|
"learning_rate": 4.2800000000000005e-06, |
|
"loss": 0.4396, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.0321206161914125, |
|
"grad_norm": 5.14950704574585, |
|
"learning_rate": 4.2244444444444446e-06, |
|
"loss": 0.3907, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.0485086856768273, |
|
"grad_norm": 4.144317150115967, |
|
"learning_rate": 4.168888888888889e-06, |
|
"loss": 0.39, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.0648967551622417, |
|
"grad_norm": 4.183619499206543, |
|
"learning_rate": 4.1133333333333335e-06, |
|
"loss": 0.4086, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.0812848246476565, |
|
"grad_norm": 5.38789176940918, |
|
"learning_rate": 4.057777777777778e-06, |
|
"loss": 0.3987, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.097672894133071, |
|
"grad_norm": 5.071357727050781, |
|
"learning_rate": 4.002222222222222e-06, |
|
"loss": 0.4209, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.1140609636184857, |
|
"grad_norm": 5.261667728424072, |
|
"learning_rate": 3.946666666666667e-06, |
|
"loss": 0.4061, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.1304490331039005, |
|
"grad_norm": 4.820098876953125, |
|
"learning_rate": 3.891111111111111e-06, |
|
"loss": 0.4132, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.146837102589315, |
|
"grad_norm": 5.199064254760742, |
|
"learning_rate": 3.835555555555555e-06, |
|
"loss": 0.41, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.1632251720747298, |
|
"grad_norm": 4.427770614624023, |
|
"learning_rate": 3.7800000000000002e-06, |
|
"loss": 0.3957, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.179613241560144, |
|
"grad_norm": 4.931752681732178, |
|
"learning_rate": 3.724444444444445e-06, |
|
"loss": 0.4051, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.196001311045559, |
|
"grad_norm": 4.410216331481934, |
|
"learning_rate": 3.668888888888889e-06, |
|
"loss": 0.3908, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.2123893805309733, |
|
"grad_norm": 5.322948455810547, |
|
"learning_rate": 3.6133333333333336e-06, |
|
"loss": 0.3829, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 2.228777450016388, |
|
"grad_norm": 4.69309663772583, |
|
"learning_rate": 3.5577777777777785e-06, |
|
"loss": 0.3768, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.2451655195018025, |
|
"grad_norm": 5.363163471221924, |
|
"learning_rate": 3.5022222222222225e-06, |
|
"loss": 0.4241, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 2.2615535889872174, |
|
"grad_norm": 5.2487688064575195, |
|
"learning_rate": 3.446666666666667e-06, |
|
"loss": 0.4001, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.2779416584726317, |
|
"grad_norm": 4.444194316864014, |
|
"learning_rate": 3.391111111111111e-06, |
|
"loss": 0.4114, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 2.2943297279580466, |
|
"grad_norm": 4.2093892097473145, |
|
"learning_rate": 3.335555555555556e-06, |
|
"loss": 0.4056, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.310717797443461, |
|
"grad_norm": 4.929553508758545, |
|
"learning_rate": 3.2800000000000004e-06, |
|
"loss": 0.3952, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 2.3271058669288758, |
|
"grad_norm": 5.331225395202637, |
|
"learning_rate": 3.2244444444444444e-06, |
|
"loss": 0.381, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.3434939364142906, |
|
"grad_norm": 4.381256580352783, |
|
"learning_rate": 3.1688888888888893e-06, |
|
"loss": 0.399, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 2.359882005899705, |
|
"grad_norm": 4.569367408752441, |
|
"learning_rate": 3.1133333333333337e-06, |
|
"loss": 0.4195, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.37627007538512, |
|
"grad_norm": 5.157946586608887, |
|
"learning_rate": 3.0577777777777778e-06, |
|
"loss": 0.4796, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 2.392658144870534, |
|
"grad_norm": 4.326797008514404, |
|
"learning_rate": 3.0022222222222227e-06, |
|
"loss": 0.4094, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.409046214355949, |
|
"grad_norm": 5.1377081871032715, |
|
"learning_rate": 2.946666666666667e-06, |
|
"loss": 0.3983, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 2.4254342838413634, |
|
"grad_norm": 5.24452543258667, |
|
"learning_rate": 2.891111111111111e-06, |
|
"loss": 0.4243, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.441822353326778, |
|
"grad_norm": 5.172399997711182, |
|
"learning_rate": 2.835555555555556e-06, |
|
"loss": 0.3962, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 2.4582104228121926, |
|
"grad_norm": 4.068329811096191, |
|
"learning_rate": 2.7800000000000005e-06, |
|
"loss": 0.3857, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.4745984922976074, |
|
"grad_norm": 5.2074995040893555, |
|
"learning_rate": 2.7244444444444445e-06, |
|
"loss": 0.4035, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 2.490986561783022, |
|
"grad_norm": 6.278205871582031, |
|
"learning_rate": 2.6688888888888894e-06, |
|
"loss": 0.4125, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.5073746312684366, |
|
"grad_norm": 5.124298572540283, |
|
"learning_rate": 2.6133333333333334e-06, |
|
"loss": 0.4153, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 2.523762700753851, |
|
"grad_norm": 5.120030879974365, |
|
"learning_rate": 2.557777777777778e-06, |
|
"loss": 0.4178, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.540150770239266, |
|
"grad_norm": 5.407374382019043, |
|
"learning_rate": 2.5022222222222224e-06, |
|
"loss": 0.4044, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 2.5565388397246807, |
|
"grad_norm": 4.730270862579346, |
|
"learning_rate": 2.446666666666667e-06, |
|
"loss": 0.3733, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.572926909210095, |
|
"grad_norm": 4.6310224533081055, |
|
"learning_rate": 2.3911111111111113e-06, |
|
"loss": 0.4064, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.5893149786955094, |
|
"grad_norm": 3.4895918369293213, |
|
"learning_rate": 2.3355555555555557e-06, |
|
"loss": 0.3976, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.6057030481809242, |
|
"grad_norm": 5.346146106719971, |
|
"learning_rate": 2.28e-06, |
|
"loss": 0.3833, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.622091117666339, |
|
"grad_norm": 4.933919906616211, |
|
"learning_rate": 2.2244444444444447e-06, |
|
"loss": 0.3894, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.622091117666339, |
|
"eval_loss": 0.39615824818611145, |
|
"eval_runtime": 1630.0413, |
|
"eval_samples_per_second": 2.393, |
|
"eval_steps_per_second": 0.15, |
|
"eval_wer": 0.2787391240946872, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.6384791871517534, |
|
"grad_norm": 4.994978427886963, |
|
"learning_rate": 2.168888888888889e-06, |
|
"loss": 0.3979, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.6548672566371683, |
|
"grad_norm": 5.25327730178833, |
|
"learning_rate": 2.1133333333333336e-06, |
|
"loss": 0.3912, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.6712553261225827, |
|
"grad_norm": 5.466716766357422, |
|
"learning_rate": 2.057777777777778e-06, |
|
"loss": 0.3953, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.6876433956079975, |
|
"grad_norm": 4.965219497680664, |
|
"learning_rate": 2.0022222222222225e-06, |
|
"loss": 0.3809, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.704031465093412, |
|
"grad_norm": 5.104487895965576, |
|
"learning_rate": 1.9466666666666665e-06, |
|
"loss": 0.389, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.7204195345788267, |
|
"grad_norm": 5.978146076202393, |
|
"learning_rate": 1.8911111111111114e-06, |
|
"loss": 0.3995, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.736807604064241, |
|
"grad_norm": 5.592274188995361, |
|
"learning_rate": 1.8355555555555557e-06, |
|
"loss": 0.3829, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.753195673549656, |
|
"grad_norm": 4.805532932281494, |
|
"learning_rate": 1.7800000000000001e-06, |
|
"loss": 0.387, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.7695837430350707, |
|
"grad_norm": 4.371249198913574, |
|
"learning_rate": 1.7244444444444448e-06, |
|
"loss": 0.3969, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.785971812520485, |
|
"grad_norm": 5.515538215637207, |
|
"learning_rate": 1.668888888888889e-06, |
|
"loss": 0.3804, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.8023598820058995, |
|
"grad_norm": 4.387998104095459, |
|
"learning_rate": 1.6133333333333335e-06, |
|
"loss": 0.3976, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.8187479514913143, |
|
"grad_norm": 6.351314544677734, |
|
"learning_rate": 1.5577777777777777e-06, |
|
"loss": 0.423, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.835136020976729, |
|
"grad_norm": 5.132925987243652, |
|
"learning_rate": 1.5022222222222224e-06, |
|
"loss": 0.3954, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.8515240904621435, |
|
"grad_norm": 4.940093517303467, |
|
"learning_rate": 1.4466666666666669e-06, |
|
"loss": 0.3874, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.8679121599475583, |
|
"grad_norm": 4.280588626861572, |
|
"learning_rate": 1.3911111111111111e-06, |
|
"loss": 0.3962, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 2.8843002294329727, |
|
"grad_norm": 4.708969593048096, |
|
"learning_rate": 1.3355555555555558e-06, |
|
"loss": 0.3874, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.9006882989183875, |
|
"grad_norm": 4.572214126586914, |
|
"learning_rate": 1.28e-06, |
|
"loss": 0.3618, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 2.917076368403802, |
|
"grad_norm": 5.374238967895508, |
|
"learning_rate": 1.2244444444444445e-06, |
|
"loss": 0.3789, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.9334644378892167, |
|
"grad_norm": 4.603006362915039, |
|
"learning_rate": 1.168888888888889e-06, |
|
"loss": 0.3744, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 2.949852507374631, |
|
"grad_norm": 5.514355659484863, |
|
"learning_rate": 1.1133333333333334e-06, |
|
"loss": 0.3705, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.966240576860046, |
|
"grad_norm": 5.96732234954834, |
|
"learning_rate": 1.0577777777777779e-06, |
|
"loss": 0.3816, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 2.9826286463454608, |
|
"grad_norm": 4.869338035583496, |
|
"learning_rate": 1.0022222222222223e-06, |
|
"loss": 0.4033, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.999016715830875, |
|
"grad_norm": 4.8514404296875, |
|
"learning_rate": 9.466666666666667e-07, |
|
"loss": 0.3862, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 3.0154047853162895, |
|
"grad_norm": 4.073346138000488, |
|
"learning_rate": 8.911111111111112e-07, |
|
"loss": 0.2642, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.0317928548017044, |
|
"grad_norm": 3.8675925731658936, |
|
"learning_rate": 8.355555555555556e-07, |
|
"loss": 0.2484, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 3.048180924287119, |
|
"grad_norm": 4.085783004760742, |
|
"learning_rate": 7.8e-07, |
|
"loss": 0.253, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.0645689937725336, |
|
"grad_norm": 5.922070026397705, |
|
"learning_rate": 7.244444444444446e-07, |
|
"loss": 0.2572, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 3.0809570632579484, |
|
"grad_norm": 3.9240541458129883, |
|
"learning_rate": 6.68888888888889e-07, |
|
"loss": 0.2628, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.0973451327433628, |
|
"grad_norm": 3.6416871547698975, |
|
"learning_rate": 6.133333333333333e-07, |
|
"loss": 0.2762, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 3.1137332022287776, |
|
"grad_norm": 3.966794729232788, |
|
"learning_rate": 5.577777777777779e-07, |
|
"loss": 0.2641, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 3.130121271714192, |
|
"grad_norm": 4.7765045166015625, |
|
"learning_rate": 5.022222222222222e-07, |
|
"loss": 0.2581, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 3.146509341199607, |
|
"grad_norm": 4.1129608154296875, |
|
"learning_rate": 4.466666666666667e-07, |
|
"loss": 0.2715, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 3.162897410685021, |
|
"grad_norm": 3.8220412731170654, |
|
"learning_rate": 3.9111111111111115e-07, |
|
"loss": 0.2639, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 3.179285480170436, |
|
"grad_norm": 4.8853912353515625, |
|
"learning_rate": 3.3555555555555556e-07, |
|
"loss": 0.2666, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 3.1956735496558504, |
|
"grad_norm": 4.593063831329346, |
|
"learning_rate": 2.8e-07, |
|
"loss": 0.2639, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 3.212061619141265, |
|
"grad_norm": 3.6766130924224854, |
|
"learning_rate": 2.2444444444444445e-07, |
|
"loss": 0.2571, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 3.2284496886266796, |
|
"grad_norm": 4.200336933135986, |
|
"learning_rate": 1.6888888888888888e-07, |
|
"loss": 0.2507, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 3.2448377581120944, |
|
"grad_norm": 4.104145050048828, |
|
"learning_rate": 1.1333333333333336e-07, |
|
"loss": 0.2527, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 3.2612258275975092, |
|
"grad_norm": 3.6322615146636963, |
|
"learning_rate": 5.777777777777778e-08, |
|
"loss": 0.2512, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 3.2776138970829236, |
|
"grad_norm": 3.6525533199310303, |
|
"learning_rate": 2.2222222222222225e-09, |
|
"loss": 0.2478, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.2776138970829236, |
|
"eval_loss": 0.41265004873275757, |
|
"eval_runtime": 1618.8893, |
|
"eval_samples_per_second": 2.41, |
|
"eval_steps_per_second": 0.151, |
|
"eval_wer": 0.2775239391435376, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.2776138970829236, |
|
"step": 5000, |
|
"total_flos": 5.435895365546803e+20, |
|
"train_loss": 0.7020168427467346, |
|
"train_runtime": 60500.8955, |
|
"train_samples_per_second": 2.645, |
|
"train_steps_per_second": 0.083 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.435895365546803e+20, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |