{
  "best_metric": 0.8709677419354839,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-mobile-eye-tracking-dataset-v2/checkpoint-45",
  "epoch": 26.666666666666668,
  "eval_steps": 500,
  "global_step": 60,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8888888888888888,
      "eval_accuracy": 0.22580645161290322,
      "eval_loss": 1.775622844696045,
      "eval_runtime": 7.2696,
      "eval_samples_per_second": 4.264,
      "eval_steps_per_second": 0.138,
      "step": 2
    },
    {
      "epoch": 1.7777777777777777,
      "eval_accuracy": 0.25806451612903225,
      "eval_loss": 1.6783900260925293,
      "eval_runtime": 5.963,
      "eval_samples_per_second": 5.199,
      "eval_steps_per_second": 0.168,
      "step": 4
    },
    {
      "epoch": 2.6666666666666665,
      "eval_accuracy": 0.3225806451612903,
      "eval_loss": 1.5861129760742188,
      "eval_runtime": 5.4576,
      "eval_samples_per_second": 5.68,
      "eval_steps_per_second": 0.183,
      "step": 6
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.41935483870967744,
      "eval_loss": 1.3571434020996094,
      "eval_runtime": 5.4352,
      "eval_samples_per_second": 5.704,
      "eval_steps_per_second": 0.184,
      "step": 9
    },
    {
      "epoch": 4.888888888888889,
      "eval_accuracy": 0.5483870967741935,
      "eval_loss": 1.099271535873413,
      "eval_runtime": 5.4311,
      "eval_samples_per_second": 5.708,
      "eval_steps_per_second": 0.184,
      "step": 11
    },
    {
      "epoch": 5.777777777777778,
      "eval_accuracy": 0.6451612903225806,
      "eval_loss": 0.9241538047790527,
      "eval_runtime": 5.4175,
      "eval_samples_per_second": 5.722,
      "eval_steps_per_second": 0.185,
      "step": 13
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 4.35577917098999,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.4667,
      "step": 15
    },
    {
      "epoch": 6.666666666666667,
      "eval_accuracy": 0.7096774193548387,
      "eval_loss": 0.7538339495658875,
      "eval_runtime": 5.4027,
      "eval_samples_per_second": 5.738,
      "eval_steps_per_second": 0.185,
      "step": 15
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.7741935483870968,
      "eval_loss": 0.6294359564781189,
      "eval_runtime": 5.4179,
      "eval_samples_per_second": 5.722,
      "eval_steps_per_second": 0.185,
      "step": 18
    },
    {
      "epoch": 8.88888888888889,
      "eval_accuracy": 0.7096774193548387,
      "eval_loss": 0.5325695872306824,
      "eval_runtime": 5.4264,
      "eval_samples_per_second": 5.713,
      "eval_steps_per_second": 0.184,
      "step": 20
    },
    {
      "epoch": 9.777777777777779,
      "eval_accuracy": 0.7419354838709677,
      "eval_loss": 0.48480212688446045,
      "eval_runtime": 5.432,
      "eval_samples_per_second": 5.707,
      "eval_steps_per_second": 0.184,
      "step": 22
    },
    {
      "epoch": 10.666666666666666,
      "eval_accuracy": 0.7741935483870968,
      "eval_loss": 0.4832201302051544,
      "eval_runtime": 5.482,
      "eval_samples_per_second": 5.655,
      "eval_steps_per_second": 0.182,
      "step": 24
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.7741935483870968,
      "eval_loss": 0.44829437136650085,
      "eval_runtime": 5.3703,
      "eval_samples_per_second": 5.772,
      "eval_steps_per_second": 0.186,
      "step": 27
    },
    {
      "epoch": 12.88888888888889,
      "eval_accuracy": 0.7741935483870968,
      "eval_loss": 0.4296128451824188,
      "eval_runtime": 5.4565,
      "eval_samples_per_second": 5.681,
      "eval_steps_per_second": 0.183,
      "step": 29
    },
    {
      "epoch": 13.333333333333334,
      "grad_norm": 5.256907939910889,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.5925,
      "step": 30
    },
    {
      "epoch": 13.777777777777779,
      "eval_accuracy": 0.7741935483870968,
      "eval_loss": 0.40228280425071716,
      "eval_runtime": 5.3947,
      "eval_samples_per_second": 5.746,
      "eval_steps_per_second": 0.185,
      "step": 31
    },
    {
      "epoch": 14.666666666666666,
      "eval_accuracy": 0.8387096774193549,
      "eval_loss": 0.4110867977142334,
      "eval_runtime": 5.4946,
      "eval_samples_per_second": 5.642,
      "eval_steps_per_second": 0.182,
      "step": 33
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.387315571308136,
      "eval_runtime": 5.4222,
      "eval_samples_per_second": 5.717,
      "eval_steps_per_second": 0.184,
      "step": 36
    },
    {
      "epoch": 16.88888888888889,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.4028545618057251,
      "eval_runtime": 5.4659,
      "eval_samples_per_second": 5.672,
      "eval_steps_per_second": 0.183,
      "step": 38
    },
    {
      "epoch": 17.77777777777778,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.4065493047237396,
      "eval_runtime": 5.394,
      "eval_samples_per_second": 5.747,
      "eval_steps_per_second": 0.185,
      "step": 40
    },
    {
      "epoch": 18.666666666666668,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.38641268014907837,
      "eval_runtime": 5.4499,
      "eval_samples_per_second": 5.688,
      "eval_steps_per_second": 0.183,
      "step": 42
    },
    {
      "epoch": 20.0,
      "grad_norm": 5.7712812423706055,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.3285,
      "step": 45
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.8709677419354839,
      "eval_loss": 0.39681780338287354,
      "eval_runtime": 5.3981,
      "eval_samples_per_second": 5.743,
      "eval_steps_per_second": 0.185,
      "step": 45
    },
    {
      "epoch": 20.88888888888889,
      "eval_accuracy": 0.8709677419354839,
      "eval_loss": 0.3929939270019531,
      "eval_runtime": 5.3507,
      "eval_samples_per_second": 5.794,
      "eval_steps_per_second": 0.187,
      "step": 47
    },
    {
      "epoch": 21.77777777777778,
      "eval_accuracy": 0.8709677419354839,
      "eval_loss": 0.3871462643146515,
      "eval_runtime": 5.3376,
      "eval_samples_per_second": 5.808,
      "eval_steps_per_second": 0.187,
      "step": 49
    },
    {
      "epoch": 22.666666666666668,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.3779211640357971,
      "eval_runtime": 5.365,
      "eval_samples_per_second": 5.778,
      "eval_steps_per_second": 0.186,
      "step": 51
    },
    {
      "epoch": 24.0,
      "eval_accuracy": 0.8064516129032258,
      "eval_loss": 0.36982351541519165,
      "eval_runtime": 5.3833,
      "eval_samples_per_second": 5.759,
      "eval_steps_per_second": 0.186,
      "step": 54
    },
    {
      "epoch": 24.88888888888889,
      "eval_accuracy": 0.8387096774193549,
      "eval_loss": 0.37262630462646484,
      "eval_runtime": 5.3782,
      "eval_samples_per_second": 5.764,
      "eval_steps_per_second": 0.186,
      "step": 56
    },
    {
      "epoch": 25.77777777777778,
      "eval_accuracy": 0.8387096774193549,
      "eval_loss": 0.3732232451438904,
      "eval_runtime": 5.4396,
      "eval_samples_per_second": 5.699,
      "eval_steps_per_second": 0.184,
      "step": 58
    },
    {
      "epoch": 26.666666666666668,
      "grad_norm": 3.0856940746307373,
      "learning_rate": 0.0,
      "loss": 0.2621,
      "step": 60
    },
    {
      "epoch": 26.666666666666668,
      "eval_accuracy": 0.8387096774193549,
      "eval_loss": 0.3731651306152344,
      "eval_runtime": 6.0663,
      "eval_samples_per_second": 5.11,
      "eval_steps_per_second": 0.165,
      "step": 60
    },
    {
      "epoch": 26.666666666666668,
      "step": 60,
      "total_flos": 3.811843351809884e+17,
      "train_loss": 0.6624618768692017,
      "train_runtime": 3041.8152,
      "train_samples_per_second": 5.671,
      "train_steps_per_second": 0.02
    }
  ],
  "logging_steps": 15,
  "max_steps": 60,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.811843351809884e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}