{
  "best_metric": 0.9860805860805861,
  "best_model_checkpoint": "distilhubert-finetuned-cry-detector/checkpoint-512",
  "epoch": 7.964860907759883,
  "eval_steps": 500,
  "global_step": 680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.9956076134699854,
      "eval_accuracy": 0.9641025641025641,
      "eval_f1": 0.9637911305761953,
      "eval_loss": 0.12035853415727615,
      "eval_precision": 0.9640509308043554,
      "eval_recall": 0.9641025641025641,
      "eval_runtime": 6.052,
      "eval_samples_per_second": 225.545,
      "eval_steps_per_second": 28.255,
      "step": 85
    },
    {
      "epoch": 1.9912152269399708,
      "eval_accuracy": 0.9772893772893773,
      "eval_f1": 0.9772624500008753,
      "eval_loss": 0.08473604917526245,
      "eval_precision": 0.9772470873644479,
      "eval_recall": 0.9772893772893773,
      "eval_runtime": 5.9068,
      "eval_samples_per_second": 231.09,
      "eval_steps_per_second": 28.95,
      "step": 170
    },
    {
      "epoch": 2.998535871156662,
      "eval_accuracy": 0.9765567765567765,
      "eval_f1": 0.9766464967450174,
      "eval_loss": 0.10245847702026367,
      "eval_precision": 0.9768621127754256,
      "eval_recall": 0.9765567765567765,
      "eval_runtime": 5.8911,
      "eval_samples_per_second": 231.706,
      "eval_steps_per_second": 29.027,
      "step": 256
    },
    {
      "epoch": 3.994143484626647,
      "eval_accuracy": 0.9831501831501831,
      "eval_f1": 0.9832333934847322,
      "eval_loss": 0.08685966581106186,
      "eval_precision": 0.9835305336122597,
      "eval_recall": 0.9831501831501831,
      "eval_runtime": 5.9568,
      "eval_samples_per_second": 229.152,
      "eval_steps_per_second": 28.707,
      "step": 341
    },
    {
      "epoch": 4.989751098096632,
      "eval_accuracy": 0.9831501831501831,
      "eval_f1": 0.983208365019189,
      "eval_loss": 0.07458490133285522,
      "eval_precision": 0.9833696765629429,
      "eval_recall": 0.9831501831501831,
      "eval_runtime": 5.9419,
      "eval_samples_per_second": 229.726,
      "eval_steps_per_second": 28.779,
      "step": 426
    },
    {
      "epoch": 5.856515373352855,
      "grad_norm": 0.01886621303856373,
      "learning_rate": 1.6360497736760383e-05,
      "loss": 0.0538,
      "step": 500
    },
    {
      "epoch": 5.997071742313324,
      "eval_accuracy": 0.9860805860805861,
      "eval_f1": 0.9860860332682386,
      "eval_loss": 0.08699370175600052,
      "eval_precision": 0.9860927746075414,
      "eval_recall": 0.9860805860805861,
      "eval_runtime": 5.9399,
      "eval_samples_per_second": 229.803,
      "eval_steps_per_second": 28.789,
      "step": 512
    },
    {
      "epoch": 6.992679355783309,
      "eval_accuracy": 0.9860805860805861,
      "eval_f1": 0.9860860332682386,
      "eval_loss": 0.08903729915618896,
      "eval_precision": 0.9860927746075414,
      "eval_recall": 0.9860805860805861,
      "eval_runtime": 5.9207,
      "eval_samples_per_second": 230.546,
      "eval_steps_per_second": 28.882,
      "step": 597
    },
    {
      "epoch": 7.964860907759883,
      "eval_accuracy": 0.9860805860805861,
      "eval_f1": 0.9860860332682386,
      "eval_loss": 0.08778514713048935,
      "eval_precision": 0.9860927746075414,
      "eval_recall": 0.9860805860805861,
      "eval_runtime": 5.9724,
      "eval_samples_per_second": 228.552,
      "eval_steps_per_second": 28.632,
      "step": 680
    }
  ],
  "logging_steps": 500,
  "max_steps": 680,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 8,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 3,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.8908515478272e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
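
The JSON above is the trainer state that the Hugging Face transformers Trainer saves alongside each checkpoint: best_model_checkpoint points at checkpoint-512 (best_metric 0.9861), log_history holds one evaluation entry per epoch plus a single training-loss entry at step 500, and stateful_callbacks records the EarlyStoppingCallback configuration (patience 3). A minimal sketch of reading the file back and summarizing the evaluation history follows; the local filename "trainer_state.json" is an assumption, not something stated in the file itself.

# Minimal sketch (not part of the original file): parse the trainer state and
# print the per-epoch evaluation metrics. The path "trainer_state.json" is an
# assumed local filename; adjust it to wherever the file is stored.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

print(f"best_metric: {state['best_metric']:.4f}")
print(f"best_model_checkpoint: {state['best_model_checkpoint']}")

# log_history mixes a training log (loss/learning_rate at step 500) with
# per-epoch evaluation entries; keep only the evaluation ones.
for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        print(
            f"epoch {entry['epoch']:5.2f}  step {entry['step']:3d}  "
            f"eval_accuracy {entry['eval_accuracy']:.4f}  "
            f"eval_loss {entry['eval_loss']:.4f}"
        )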