{
  "best_metric": 0.8673913043478261,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-teeth_dataset/checkpoint-67",
  "epoch": 40.0,
  "eval_steps": 500,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8,
      "eval_accuracy": 0.7913043478260869,
      "eval_loss": 1.1515443325042725,
      "eval_runtime": 4.209,
      "eval_samples_per_second": 109.29,
      "eval_steps_per_second": 3.564,
      "step": 3
    },
    {
      "epoch": 1.87,
      "eval_accuracy": 0.7630434782608696,
      "eval_loss": 1.1545782089233398,
      "eval_runtime": 4.4373,
      "eval_samples_per_second": 103.665,
      "eval_steps_per_second": 3.38,
      "step": 7
    },
    {
      "epoch": 2.67,
      "grad_norm": 12.2432222366333,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.6441,
      "step": 10
    },
    {
      "epoch": 2.93,
      "eval_accuracy": 0.8065217391304348,
      "eval_loss": 1.070844292640686,
      "eval_runtime": 4.2607,
      "eval_samples_per_second": 107.964,
      "eval_steps_per_second": 3.521,
      "step": 11
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8217391304347826,
      "eval_loss": 0.992546796798706,
      "eval_runtime": 4.1431,
      "eval_samples_per_second": 111.027,
      "eval_steps_per_second": 3.62,
      "step": 15
    },
    {
      "epoch": 4.8,
      "eval_accuracy": 0.8108695652173913,
      "eval_loss": 0.99262535572052,
      "eval_runtime": 3.9996,
      "eval_samples_per_second": 115.012,
      "eval_steps_per_second": 3.75,
      "step": 18
    },
    {
      "epoch": 5.33,
      "grad_norm": 12.744916915893555,
      "learning_rate": 4.814814814814815e-05,
      "loss": 0.6042,
      "step": 20
    },
    {
      "epoch": 5.87,
      "eval_accuracy": 0.7934782608695652,
      "eval_loss": 1.016530990600586,
      "eval_runtime": 4.0517,
      "eval_samples_per_second": 113.532,
      "eval_steps_per_second": 3.702,
      "step": 22
    },
    {
      "epoch": 6.93,
      "eval_accuracy": 0.8347826086956521,
      "eval_loss": 0.8679854273796082,
      "eval_runtime": 3.9943,
      "eval_samples_per_second": 115.163,
      "eval_steps_per_second": 3.755,
      "step": 26
    },
    {
      "epoch": 8.0,
      "grad_norm": 23.645719528198242,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.4293,
      "step": 30
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.8347826086956521,
      "eval_loss": 0.8360960483551025,
      "eval_runtime": 4.0619,
      "eval_samples_per_second": 113.248,
      "eval_steps_per_second": 3.693,
      "step": 30
    },
    {
      "epoch": 8.8,
      "eval_accuracy": 0.8565217391304348,
      "eval_loss": 0.8075701594352722,
      "eval_runtime": 4.0291,
      "eval_samples_per_second": 114.17,
      "eval_steps_per_second": 3.723,
      "step": 33
    },
    {
      "epoch": 9.87,
      "eval_accuracy": 0.8565217391304348,
      "eval_loss": 0.7547765970230103,
      "eval_runtime": 4.0783,
      "eval_samples_per_second": 112.793,
      "eval_steps_per_second": 3.678,
      "step": 37
    },
    {
      "epoch": 10.67,
      "grad_norm": 20.158720016479492,
      "learning_rate": 4.074074074074074e-05,
      "loss": 0.4471,
      "step": 40
    },
    {
      "epoch": 10.93,
      "eval_accuracy": 0.8369565217391305,
      "eval_loss": 0.7901058793067932,
      "eval_runtime": 4.2465,
      "eval_samples_per_second": 108.325,
      "eval_steps_per_second": 3.532,
      "step": 41
    },
    {
      "epoch": 12.0,
      "eval_accuracy": 0.8543478260869565,
      "eval_loss": 0.7326875925064087,
      "eval_runtime": 4.3562,
      "eval_samples_per_second": 105.597,
      "eval_steps_per_second": 3.443,
      "step": 45
    },
    {
      "epoch": 12.8,
      "eval_accuracy": 0.8478260869565217,
      "eval_loss": 0.748528003692627,
      "eval_runtime": 4.402,
      "eval_samples_per_second": 104.498,
      "eval_steps_per_second": 3.408,
      "step": 48
    },
    {
      "epoch": 13.33,
      "grad_norm": 11.568655967712402,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.3436,
      "step": 50
    },
    {
      "epoch": 13.87,
      "eval_accuracy": 0.8347826086956521,
      "eval_loss": 0.7844414710998535,
      "eval_runtime": 4.2227,
      "eval_samples_per_second": 108.934,
      "eval_steps_per_second": 3.552,
      "step": 52
    },
    {
      "epoch": 14.93,
      "eval_accuracy": 0.8630434782608696,
      "eval_loss": 0.6644619107246399,
      "eval_runtime": 4.1242,
      "eval_samples_per_second": 111.537,
      "eval_steps_per_second": 3.637,
      "step": 56
    },
    {
      "epoch": 16.0,
      "grad_norm": 14.64358139038086,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2813,
      "step": 60
    },
    {
      "epoch": 16.0,
      "eval_accuracy": 0.8391304347826087,
      "eval_loss": 0.7370373010635376,
      "eval_runtime": 4.0228,
      "eval_samples_per_second": 114.348,
      "eval_steps_per_second": 3.729,
      "step": 60
    },
    {
      "epoch": 16.8,
      "eval_accuracy": 0.8347826086956521,
      "eval_loss": 0.7598132491111755,
      "eval_runtime": 3.9928,
      "eval_samples_per_second": 115.208,
      "eval_steps_per_second": 3.757,
      "step": 63
    },
    {
      "epoch": 17.87,
      "eval_accuracy": 0.8673913043478261,
      "eval_loss": 0.6579979062080383,
      "eval_runtime": 4.1194,
      "eval_samples_per_second": 111.667,
      "eval_steps_per_second": 3.641,
      "step": 67
    },
    {
      "epoch": 18.67,
      "grad_norm": 16.545446395874023,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.275,
      "step": 70
    },
    {
      "epoch": 18.93,
      "eval_accuracy": 0.8456521739130435,
      "eval_loss": 0.7026421427726746,
      "eval_runtime": 4.039,
      "eval_samples_per_second": 113.889,
      "eval_steps_per_second": 3.714,
      "step": 71
    },
    {
      "epoch": 20.0,
      "eval_accuracy": 0.8369565217391305,
      "eval_loss": 0.7055774927139282,
      "eval_runtime": 4.0414,
      "eval_samples_per_second": 113.823,
      "eval_steps_per_second": 3.712,
      "step": 75
    },
    {
      "epoch": 20.8,
      "eval_accuracy": 0.8543478260869565,
      "eval_loss": 0.6517285108566284,
      "eval_runtime": 4.1333,
      "eval_samples_per_second": 111.292,
      "eval_steps_per_second": 3.629,
      "step": 78
    },
    {
      "epoch": 21.33,
      "grad_norm": 13.078596115112305,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.2674,
      "step": 80
    },
    {
      "epoch": 21.87,
      "eval_accuracy": 0.8608695652173913,
      "eval_loss": 0.5930755734443665,
      "eval_runtime": 4.2994,
      "eval_samples_per_second": 106.991,
      "eval_steps_per_second": 3.489,
      "step": 82
    },
    {
      "epoch": 22.93,
      "eval_accuracy": 0.8608695652173913,
      "eval_loss": 0.5985938310623169,
      "eval_runtime": 4.3353,
      "eval_samples_per_second": 106.105,
      "eval_steps_per_second": 3.46,
      "step": 86
    },
    {
      "epoch": 24.0,
      "grad_norm": 8.3045015335083,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2278,
      "step": 90
    },
    {
      "epoch": 24.0,
      "eval_accuracy": 0.8608695652173913,
      "eval_loss": 0.5982087254524231,
      "eval_runtime": 4.3231,
      "eval_samples_per_second": 106.404,
      "eval_steps_per_second": 3.47,
      "step": 90
    },
    {
      "epoch": 24.8,
      "eval_accuracy": 0.8543478260869565,
      "eval_loss": 0.604324460029602,
      "eval_runtime": 4.0948,
      "eval_samples_per_second": 112.337,
      "eval_steps_per_second": 3.663,
      "step": 93
    },
    {
      "epoch": 25.87,
      "eval_accuracy": 0.8521739130434782,
      "eval_loss": 0.6148393750190735,
      "eval_runtime": 4.0061,
      "eval_samples_per_second": 114.824,
      "eval_steps_per_second": 3.744,
      "step": 97
    },
    {
      "epoch": 26.67,
      "grad_norm": 15.322602272033691,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.2034,
      "step": 100
    },
    {
      "epoch": 26.93,
      "eval_accuracy": 0.85,
      "eval_loss": 0.6160370111465454,
      "eval_runtime": 4.0736,
      "eval_samples_per_second": 112.922,
      "eval_steps_per_second": 3.682,
      "step": 101
    },
    {
      "epoch": 28.0,
      "eval_accuracy": 0.8521739130434782,
      "eval_loss": 0.5903608798980713,
      "eval_runtime": 4.1333,
      "eval_samples_per_second": 111.291,
      "eval_steps_per_second": 3.629,
      "step": 105
    },
    {
      "epoch": 28.8,
      "eval_accuracy": 0.8608695652173913,
      "eval_loss": 0.5824848413467407,
      "eval_runtime": 4.2951,
      "eval_samples_per_second": 107.1,
      "eval_steps_per_second": 3.492,
      "step": 108
    },
    {
      "epoch": 29.33,
      "grad_norm": 12.949268341064453,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.2207,
      "step": 110
    },
    {
      "epoch": 29.87,
      "eval_accuracy": 0.8478260869565217,
      "eval_loss": 0.6132237911224365,
      "eval_runtime": 4.382,
      "eval_samples_per_second": 104.975,
      "eval_steps_per_second": 3.423,
      "step": 112
    },
    {
      "epoch": 30.93,
      "eval_accuracy": 0.8391304347826087,
      "eval_loss": 0.6181351542472839,
      "eval_runtime": 4.27,
      "eval_samples_per_second": 107.728,
      "eval_steps_per_second": 3.513,
      "step": 116
    },
    {
      "epoch": 32.0,
      "grad_norm": 5.803860664367676,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.1701,
      "step": 120
    },
    {
      "epoch": 32.0,
      "eval_accuracy": 0.85,
      "eval_loss": 0.5688785910606384,
      "eval_runtime": 4.1338,
      "eval_samples_per_second": 111.279,
      "eval_steps_per_second": 3.629,
      "step": 120
    },
    {
      "epoch": 32.8,
      "eval_accuracy": 0.8608695652173913,
      "eval_loss": 0.5633876919746399,
      "eval_runtime": 4.7704,
      "eval_samples_per_second": 96.428,
      "eval_steps_per_second": 3.144,
      "step": 123
    },
    {
      "epoch": 33.87,
      "eval_accuracy": 0.8652173913043478,
      "eval_loss": 0.5659446120262146,
      "eval_runtime": 4.0029,
      "eval_samples_per_second": 114.915,
      "eval_steps_per_second": 3.747,
      "step": 127
    },
    {
      "epoch": 34.67,
      "grad_norm": 18.19119644165039,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.1681,
      "step": 130
    },
    {
      "epoch": 34.93,
      "eval_accuracy": 0.8565217391304348,
      "eval_loss": 0.5895132422447205,
      "eval_runtime": 4.0203,
      "eval_samples_per_second": 114.42,
      "eval_steps_per_second": 3.731,
      "step": 131
    },
    {
      "epoch": 36.0,
      "eval_accuracy": 0.85,
      "eval_loss": 0.6094810366630554,
      "eval_runtime": 4.1594,
      "eval_samples_per_second": 110.594,
      "eval_steps_per_second": 3.606,
      "step": 135
    },
    {
      "epoch": 36.8,
      "eval_accuracy": 0.8456521739130435,
      "eval_loss": 0.614500105381012,
      "eval_runtime": 4.3427,
      "eval_samples_per_second": 105.924,
      "eval_steps_per_second": 3.454,
      "step": 138
    },
    {
      "epoch": 37.33,
      "grad_norm": 7.945148468017578,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.1435,
      "step": 140
    },
    {
      "epoch": 37.87,
      "eval_accuracy": 0.8478260869565217,
      "eval_loss": 0.6111825704574585,
      "eval_runtime": 4.3974,
      "eval_samples_per_second": 104.606,
      "eval_steps_per_second": 3.411,
      "step": 142
    },
    {
      "epoch": 38.93,
      "eval_accuracy": 0.8456521739130435,
      "eval_loss": 0.599669873714447,
      "eval_runtime": 4.2726,
      "eval_samples_per_second": 107.662,
      "eval_steps_per_second": 3.511,
      "step": 146
    },
    {
      "epoch": 40.0,
      "grad_norm": 11.281535148620605,
      "learning_rate": 0.0,
      "loss": 0.1586,
      "step": 150
    },
    {
      "epoch": 40.0,
      "eval_accuracy": 0.85,
      "eval_loss": 0.5965761542320251,
      "eval_runtime": 4.0634,
      "eval_samples_per_second": 113.205,
      "eval_steps_per_second": 3.691,
      "step": 150
    },
    {
      "epoch": 40.0,
      "step": 150,
      "total_flos": 4.585003492737024e+17,
      "train_loss": 0.305610773563385,
      "train_runtime": 583.5167,
      "train_samples_per_second": 39.416,
      "train_steps_per_second": 0.257
    }
  ],
  "logging_steps": 10,
  "max_steps": 150,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 500,
  "total_flos": 4.585003492737024e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}