{
"best_metric": 2.643799304962158,
"best_model_checkpoint": "cat_breed_image_detection/checkpoint-3811",
"epoch": 1.0,
"eval_steps": 500,
"global_step": 3811,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.13,
"learning_rate": 2.641052911459718e-07,
"loss": 2.6442,
"step": 500
},
{
"epoch": 0.26,
"learning_rate": 2.2422228130816273e-07,
"loss": 2.6413,
"step": 1000
},
{
"epoch": 0.39,
"learning_rate": 1.8433927147035363e-07,
"loss": 2.63,
"step": 1500
},
{
"epoch": 0.52,
"learning_rate": 1.444562616325445e-07,
"loss": 2.6267,
"step": 2000
},
{
"epoch": 0.66,
"learning_rate": 1.0457325179473543e-07,
"loss": 2.6237,
"step": 2500
},
{
"epoch": 0.79,
"learning_rate": 6.469024195692635e-08,
"loss": 2.6351,
"step": 3000
},
{
"epoch": 0.92,
"learning_rate": 2.4807232119117254e-08,
"loss": 2.6347,
"step": 3500
},
{
"epoch": 1.0,
"eval_accuracy": 0.39673752583407146,
"eval_loss": 2.643799304962158,
"eval_runtime": 303.04,
"eval_samples_per_second": 89.414,
"eval_steps_per_second": 2.795,
"step": 3811
}
],
"logging_steps": 500,
"max_steps": 3811,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 1.8905297684573454e+19,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}