|
{
  "best_metric": 0.9777131782945736,
  "best_model_checkpoint": "swinv2-tiny-patch4-window16-256-finetuned-plantdisease/checkpoint-516",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 516,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01937984496124031,
      "grad_norm": 6.709539890289307,
      "learning_rate": 9.615384615384616e-06,
      "loss": 2.7078,
      "step": 10
    },
    {
      "epoch": 0.03875968992248062,
      "grad_norm": 7.255009651184082,
      "learning_rate": 1.923076923076923e-05,
      "loss": 2.5761,
      "step": 20
    },
    {
      "epoch": 0.05813953488372093,
      "grad_norm": 7.875271320343018,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 2.331,
      "step": 30
    },
    {
      "epoch": 0.07751937984496124,
      "grad_norm": 9.543013572692871,
      "learning_rate": 3.846153846153846e-05,
      "loss": 2.1674,
      "step": 40
    },
    {
      "epoch": 0.09689922480620156,
      "grad_norm": 13.559645652770996,
      "learning_rate": 4.8076923076923084e-05,
      "loss": 1.7745,
      "step": 50
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 16.62691307067871,
      "learning_rate": 4.913793103448276e-05,
      "loss": 1.3937,
      "step": 60
    },
    {
      "epoch": 0.13565891472868216,
      "grad_norm": 15.683610916137695,
      "learning_rate": 4.806034482758621e-05,
      "loss": 1.0158,
      "step": 70
    },
    {
      "epoch": 0.15503875968992248,
      "grad_norm": 14.473851203918457,
      "learning_rate": 4.698275862068966e-05,
      "loss": 0.8055,
      "step": 80
    },
    {
      "epoch": 0.1744186046511628,
      "grad_norm": 14.953446388244629,
      "learning_rate": 4.590517241379311e-05,
      "loss": 0.6449,
      "step": 90
    },
    {
      "epoch": 0.1937984496124031,
      "grad_norm": 11.831011772155762,
      "learning_rate": 4.482758620689655e-05,
      "loss": 0.6051,
      "step": 100
    },
    {
      "epoch": 0.2131782945736434,
      "grad_norm": 13.278793334960938,
      "learning_rate": 4.375e-05,
      "loss": 0.5566,
      "step": 110
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 11.702147483825684,
      "learning_rate": 4.267241379310345e-05,
      "loss": 0.4845,
      "step": 120
    },
    {
      "epoch": 0.25193798449612403,
      "grad_norm": 10.426274299621582,
      "learning_rate": 4.1594827586206896e-05,
      "loss": 0.3641,
      "step": 130
    },
    {
      "epoch": 0.2713178294573643,
      "grad_norm": 17.529104232788086,
      "learning_rate": 4.0517241379310344e-05,
      "loss": 0.3593,
      "step": 140
    },
    {
      "epoch": 0.29069767441860467,
      "grad_norm": 12.532708168029785,
      "learning_rate": 3.94396551724138e-05,
      "loss": 0.3459,
      "step": 150
    },
    {
      "epoch": 0.31007751937984496,
      "grad_norm": 16.44886589050293,
      "learning_rate": 3.8362068965517246e-05,
      "loss": 0.4475,
      "step": 160
    },
    {
      "epoch": 0.32945736434108525,
      "grad_norm": 8.827244758605957,
      "learning_rate": 3.7284482758620694e-05,
      "loss": 0.3544,
      "step": 170
    },
    {
      "epoch": 0.3488372093023256,
      "grad_norm": 6.035519123077393,
      "learning_rate": 3.620689655172414e-05,
      "loss": 0.3214,
      "step": 180
    },
    {
      "epoch": 0.3682170542635659,
      "grad_norm": 7.568902969360352,
      "learning_rate": 3.512931034482759e-05,
      "loss": 0.2359,
      "step": 190
    },
    {
      "epoch": 0.3875968992248062,
      "grad_norm": 13.209157943725586,
      "learning_rate": 3.405172413793103e-05,
      "loss": 0.34,
      "step": 200
    },
    {
      "epoch": 0.4069767441860465,
      "grad_norm": 19.102611541748047,
      "learning_rate": 3.297413793103448e-05,
      "loss": 0.3697,
      "step": 210
    },
    {
      "epoch": 0.4263565891472868,
      "grad_norm": 12.98582649230957,
      "learning_rate": 3.1896551724137935e-05,
      "loss": 0.2864,
      "step": 220
    },
    {
      "epoch": 0.44573643410852715,
      "grad_norm": 14.064764976501465,
      "learning_rate": 3.081896551724138e-05,
      "loss": 0.2606,
      "step": 230
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 13.41550064086914,
      "learning_rate": 2.974137931034483e-05,
      "loss": 0.2949,
      "step": 240
    },
    {
      "epoch": 0.4844961240310077,
      "grad_norm": 7.437551498413086,
      "learning_rate": 2.866379310344828e-05,
      "loss": 0.1955,
      "step": 250
    },
    {
      "epoch": 0.5038759689922481,
      "grad_norm": 8.963953018188477,
      "learning_rate": 2.7586206896551727e-05,
      "loss": 0.2536,
      "step": 260
    },
    {
      "epoch": 0.5232558139534884,
      "grad_norm": 11.374913215637207,
      "learning_rate": 2.650862068965517e-05,
      "loss": 0.2592,
      "step": 270
    },
    {
      "epoch": 0.5426356589147286,
      "grad_norm": 7.7792768478393555,
      "learning_rate": 2.543103448275862e-05,
      "loss": 0.2024,
      "step": 280
    },
    {
      "epoch": 0.562015503875969,
      "grad_norm": 14.774372100830078,
      "learning_rate": 2.435344827586207e-05,
      "loss": 0.228,
      "step": 290
    },
    {
      "epoch": 0.5813953488372093,
      "grad_norm": 8.32029914855957,
      "learning_rate": 2.327586206896552e-05,
      "loss": 0.1849,
      "step": 300
    },
    {
      "epoch": 0.6007751937984496,
      "grad_norm": 14.661565780639648,
      "learning_rate": 2.2198275862068967e-05,
      "loss": 0.1835,
      "step": 310
    },
    {
      "epoch": 0.6201550387596899,
      "grad_norm": 15.804061889648438,
      "learning_rate": 2.1120689655172415e-05,
      "loss": 0.1828,
      "step": 320
    },
    {
      "epoch": 0.6395348837209303,
      "grad_norm": 19.60464859008789,
      "learning_rate": 2.0043103448275863e-05,
      "loss": 0.1874,
      "step": 330
    },
    {
      "epoch": 0.6589147286821705,
      "grad_norm": 5.921483516693115,
      "learning_rate": 1.896551724137931e-05,
      "loss": 0.1741,
      "step": 340
    },
    {
      "epoch": 0.6782945736434108,
      "grad_norm": 8.562132835388184,
      "learning_rate": 1.7887931034482762e-05,
      "loss": 0.2541,
      "step": 350
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 7.037123680114746,
      "learning_rate": 1.6810344827586207e-05,
      "loss": 0.2101,
      "step": 360
    },
    {
      "epoch": 0.7170542635658915,
      "grad_norm": 4.712380886077881,
      "learning_rate": 1.5732758620689655e-05,
      "loss": 0.2088,
      "step": 370
    },
    {
      "epoch": 0.7364341085271318,
      "grad_norm": 5.721762180328369,
      "learning_rate": 1.4655172413793103e-05,
      "loss": 0.2245,
      "step": 380
    },
    {
      "epoch": 0.7558139534883721,
      "grad_norm": 8.76972484588623,
      "learning_rate": 1.3577586206896553e-05,
      "loss": 0.1713,
      "step": 390
    },
    {
      "epoch": 0.7751937984496124,
      "grad_norm": 8.067720413208008,
      "learning_rate": 1.25e-05,
      "loss": 0.1822,
      "step": 400
    },
    {
      "epoch": 0.7945736434108527,
      "grad_norm": 8.3314208984375,
      "learning_rate": 1.1422413793103449e-05,
      "loss": 0.1781,
      "step": 410
    },
    {
      "epoch": 0.813953488372093,
      "grad_norm": 22.368515014648438,
      "learning_rate": 1.0344827586206897e-05,
      "loss": 0.1921,
      "step": 420
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 11.097426414489746,
      "learning_rate": 9.267241379310346e-06,
      "loss": 0.1627,
      "step": 430
    },
    {
      "epoch": 0.8527131782945736,
      "grad_norm": 20.73263168334961,
      "learning_rate": 8.189655172413793e-06,
      "loss": 0.1884,
      "step": 440
    },
    {
      "epoch": 0.872093023255814,
      "grad_norm": 2.5283663272857666,
      "learning_rate": 7.112068965517242e-06,
      "loss": 0.2005,
      "step": 450
    },
    {
      "epoch": 0.8914728682170543,
      "grad_norm": 4.581237316131592,
      "learning_rate": 6.03448275862069e-06,
      "loss": 0.1945,
      "step": 460
    },
    {
      "epoch": 0.9108527131782945,
      "grad_norm": 4.513861656188965,
      "learning_rate": 4.9568965517241384e-06,
      "loss": 0.1427,
      "step": 470
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 13.439146041870117,
      "learning_rate": 3.8793103448275865e-06,
      "loss": 0.1233,
      "step": 480
    },
    {
      "epoch": 0.9496124031007752,
      "grad_norm": 9.163981437683105,
      "learning_rate": 2.8017241379310345e-06,
      "loss": 0.1171,
      "step": 490
    },
    {
      "epoch": 0.9689922480620154,
      "grad_norm": 6.725685119628906,
      "learning_rate": 1.724137931034483e-06,
      "loss": 0.2016,
      "step": 500
    },
    {
      "epoch": 0.9883720930232558,
      "grad_norm": 5.155745029449463,
      "learning_rate": 6.46551724137931e-07,
      "loss": 0.103,
      "step": 510
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9777131782945736,
      "eval_loss": 0.06364382058382034,
      "eval_runtime": 43.5867,
      "eval_samples_per_second": 94.708,
      "eval_steps_per_second": 11.838,
      "step": 516
    },
    {
      "epoch": 1.0,
      "step": 516,
      "total_flos": 5.373364651307827e+17,
      "train_loss": 0.5084914574327395,
      "train_runtime": 598.2279,
      "train_samples_per_second": 27.598,
      "train_steps_per_second": 0.863
    }
  ],
  "logging_steps": 10,
  "max_steps": 516,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.373364651307827e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|