{
  "best_metric": 27.692698197817073,
  "best_model_checkpoint": "./whisper-large-sindhi/checkpoint-500",
  "epoch": 76.88888888888889,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.89,
      "learning_rate": 6.505149978319905e-06,
      "loss": 3.6387,
      "step": 25
    },
    {
      "epoch": 3.81,
      "learning_rate": 8.31378915840787e-06,
      "loss": 0.5812,
      "step": 50
    },
    {
      "epoch": 5.74,
      "learning_rate": 9.28666248215634e-06,
      "loss": 0.2363,
      "step": 75
    },
    {
      "epoch": 7.67,
      "learning_rate": 9.956130378462474e-06,
      "loss": 0.0927,
      "step": 100
    },
    {
      "epoch": 9.59,
      "learning_rate": 9.755555555555556e-06,
      "loss": 0.0352,
      "step": 125
    },
    {
      "epoch": 11.52,
      "learning_rate": 9.466666666666667e-06,
      "loss": 0.0142,
      "step": 150
    },
    {
      "epoch": 13.44,
      "learning_rate": 9.17777777777778e-06,
      "loss": 0.0081,
      "step": 175
    },
    {
      "epoch": 15.37,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.0046,
      "step": 200
    },
    {
      "epoch": 17.3,
      "learning_rate": 8.6e-06,
      "loss": 0.0037,
      "step": 225
    },
    {
      "epoch": 19.22,
      "learning_rate": 8.311111111111111e-06,
      "loss": 0.0028,
      "step": 250
    },
    {
      "epoch": 21.15,
      "learning_rate": 8.022222222222222e-06,
      "loss": 0.0023,
      "step": 275
    },
    {
      "epoch": 23.07,
      "learning_rate": 7.733333333333334e-06,
      "loss": 0.0011,
      "step": 300
    },
    {
      "epoch": 24.96,
      "learning_rate": 7.444444444444445e-06,
      "loss": 0.0009,
      "step": 325
    },
    {
      "epoch": 26.89,
      "learning_rate": 7.155555555555556e-06,
      "loss": 0.0008,
      "step": 350
    },
    {
      "epoch": 28.81,
      "learning_rate": 6.866666666666667e-06,
      "loss": 0.0007,
      "step": 375
    },
    {
      "epoch": 30.74,
      "learning_rate": 6.577777777777779e-06,
      "loss": 0.0006,
      "step": 400
    },
    {
      "epoch": 32.67,
      "learning_rate": 6.28888888888889e-06,
      "loss": 0.0006,
      "step": 425
    },
    {
      "epoch": 34.59,
      "learning_rate": 6e-06,
      "loss": 0.0005,
      "step": 450
    },
    {
      "epoch": 36.52,
      "learning_rate": 5.711111111111112e-06,
      "loss": 0.0005,
      "step": 475
    },
    {
      "epoch": 38.44,
      "learning_rate": 5.422222222222223e-06,
      "loss": 0.0005,
      "step": 500
    },
    {
      "epoch": 38.44,
      "eval_loss": 0.63818359375,
      "eval_runtime": 605.6018,
      "eval_samples_per_second": 1.618,
      "eval_steps_per_second": 0.026,
      "eval_wer": 27.692698197817073,
      "step": 500
    },
    {
      "epoch": 40.37,
      "learning_rate": 5.133333333333334e-06,
      "loss": 0.0004,
      "step": 525
    },
    {
      "epoch": 42.3,
      "learning_rate": 4.8444444444444446e-06,
      "loss": 0.0004,
      "step": 550
    },
    {
      "epoch": 44.22,
      "learning_rate": 4.555555555555556e-06,
      "loss": 0.0004,
      "step": 575
    },
    {
      "epoch": 46.15,
      "learning_rate": 4.266666666666668e-06,
      "loss": 0.0004,
      "step": 600
    },
    {
      "epoch": 48.07,
      "learning_rate": 3.977777777777778e-06,
      "loss": 0.0004,
      "step": 625
    },
    {
      "epoch": 49.96,
      "learning_rate": 3.7e-06,
      "loss": 0.0003,
      "step": 650
    },
    {
      "epoch": 51.89,
      "learning_rate": 3.4111111111111113e-06,
      "loss": 0.0003,
      "step": 675
    },
    {
      "epoch": 53.81,
      "learning_rate": 3.1222222222222228e-06,
      "loss": 0.0003,
      "step": 700
    },
    {
      "epoch": 55.74,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.0003,
      "step": 725
    },
    {
      "epoch": 57.67,
      "learning_rate": 2.5444444444444446e-06,
      "loss": 0.0003,
      "step": 750
    },
    {
      "epoch": 59.59,
      "learning_rate": 2.2555555555555557e-06,
      "loss": 0.0003,
      "step": 775
    },
    {
      "epoch": 61.52,
      "learning_rate": 1.9666666666666668e-06,
      "loss": 0.0003,
      "step": 800
    },
    {
      "epoch": 63.44,
      "learning_rate": 1.6777777777777779e-06,
      "loss": 0.0003,
      "step": 825
    },
    {
      "epoch": 65.37,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 0.0003,
      "step": 850
    },
    {
      "epoch": 67.3,
      "learning_rate": 1.1e-06,
      "loss": 0.0003,
      "step": 875
    },
    {
      "epoch": 69.22,
      "learning_rate": 8.111111111111112e-07,
      "loss": 0.0003,
      "step": 900
    },
    {
      "epoch": 71.15,
      "learning_rate": 5.222222222222223e-07,
      "loss": 0.0003,
      "step": 925
    },
    {
      "epoch": 73.07,
      "learning_rate": 2.3333333333333336e-07,
      "loss": 0.0003,
      "step": 950
    },
    {
      "epoch": 74.96,
      "learning_rate": 0.0,
      "loss": 0.0003,
      "step": 975
    },
    {
      "epoch": 76.89,
      "learning_rate": 0.0,
      "loss": 0.0003,
      "step": 1000
    },
    {
      "epoch": 76.89,
      "eval_loss": 0.67138671875,
      "eval_runtime": 605.19,
      "eval_samples_per_second": 1.619,
      "eval_steps_per_second": 0.026,
      "eval_wer": 27.832303917421104,
      "step": 1000
    },
    {
      "epoch": 76.89,
      "step": 1000,
      "total_flos": 1.384647263524592e+20,
      "train_loss": 0.1158139866143465,
      "train_runtime": 9638.6666,
      "train_samples_per_second": 6.64,
      "train_steps_per_second": 0.104
    }
  ],
  "max_steps": 1000,
  "num_train_epochs": 77,
  "total_flos": 1.384647263524592e+20,
  "trial_name": null,
  "trial_params": null
}