{
  "best_metric": 0.32059717178344727,
  "best_model_checkpoint": "autotrain-z9wst-apic1/checkpoint-1506",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1506,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016600265604249667,
      "grad_norm": 0.7011755108833313,
      "learning_rate": 8.27814569536424e-06,
      "loss": 0.6938,
      "step": 25
    },
    {
      "epoch": 0.033200531208499334,
      "grad_norm": 1.692274808883667,
      "learning_rate": 1.655629139072848e-05,
      "loss": 0.6645,
      "step": 50
    },
    {
      "epoch": 0.049800796812749,
      "grad_norm": 2.9643406867980957,
      "learning_rate": 2.4834437086092715e-05,
      "loss": 0.4689,
      "step": 75
    },
    {
      "epoch": 0.06640106241699867,
      "grad_norm": 6.690465927124023,
      "learning_rate": 3.2450331125827816e-05,
      "loss": 0.411,
      "step": 100
    },
    {
      "epoch": 0.08300132802124834,
      "grad_norm": 4.3464460372924805,
      "learning_rate": 4.0728476821192055e-05,
      "loss": 0.3856,
      "step": 125
    },
    {
      "epoch": 0.099601593625498,
      "grad_norm": 4.394780158996582,
      "learning_rate": 4.900662251655629e-05,
      "loss": 0.3458,
      "step": 150
    },
    {
      "epoch": 0.11620185922974767,
      "grad_norm": 4.849474906921387,
      "learning_rate": 4.9967485104175286e-05,
      "loss": 0.3469,
      "step": 175
    },
    {
      "epoch": 0.13280212483399734,
      "grad_norm": 3.016519069671631,
      "learning_rate": 4.9851715045824994e-05,
      "loss": 0.3273,
      "step": 200
    },
    {
      "epoch": 0.14940239043824702,
      "grad_norm": 7.323078632354736,
      "learning_rate": 4.965247392483274e-05,
      "loss": 0.326,
      "step": 225
    },
    {
      "epoch": 0.16600265604249667,
      "grad_norm": 1.702303171157837,
      "learning_rate": 4.937043094523778e-05,
      "loss": 0.3507,
      "step": 250
    },
    {
      "epoch": 0.18260292164674635,
      "grad_norm": 3.314708709716797,
      "learning_rate": 4.900653342303719e-05,
      "loss": 0.3253,
      "step": 275
    },
    {
      "epoch": 0.199203187250996,
      "grad_norm": 3.7664265632629395,
      "learning_rate": 4.8562003604374334e-05,
      "loss": 0.3223,
      "step": 300
    },
    {
      "epoch": 0.2158034528552457,
      "grad_norm": 2.8503146171569824,
      "learning_rate": 4.8038334560301714e-05,
      "loss": 0.3276,
      "step": 325
    },
    {
      "epoch": 0.23240371845949534,
      "grad_norm": 3.164609909057617,
      "learning_rate": 4.7437285171906644e-05,
      "loss": 0.2694,
      "step": 350
    },
    {
      "epoch": 0.24900398406374502,
      "grad_norm": 5.1076531410217285,
      "learning_rate": 4.676087422264358e-05,
      "loss": 0.2641,
      "step": 375
    },
    {
      "epoch": 0.2656042496679947,
      "grad_norm": 3.561342477798462,
      "learning_rate": 4.60113736177156e-05,
      "loss": 0.3179,
      "step": 400
    },
    {
      "epoch": 0.2822045152722444,
      "grad_norm": 5.70325231552124,
      "learning_rate": 4.519130075327941e-05,
      "loss": 0.2602,
      "step": 425
    },
    {
      "epoch": 0.29880478087649404,
      "grad_norm": 3.692572593688965,
      "learning_rate": 4.43034100611041e-05,
      "loss": 0.2565,
      "step": 450
    },
    {
      "epoch": 0.3154050464807437,
      "grad_norm": 3.2501657009124756,
      "learning_rate": 4.3350683757082966e-05,
      "loss": 0.2269,
      "step": 475
    },
    {
      "epoch": 0.33200531208499334,
      "grad_norm": 4.347064971923828,
      "learning_rate": 4.233632182467218e-05,
      "loss": 0.2527,
      "step": 500
    },
    {
      "epoch": 0.34860557768924305,
      "grad_norm": 3.8920302391052246,
      "learning_rate": 4.126373126689931e-05,
      "loss": 0.2233,
      "step": 525
    },
    {
      "epoch": 0.3652058432934927,
      "grad_norm": 2.748621940612793,
      "learning_rate": 4.013651466304197e-05,
      "loss": 0.2574,
      "step": 550
    },
    {
      "epoch": 0.38180610889774236,
      "grad_norm": 1.6398485898971558,
      "learning_rate": 3.8958458068411756e-05,
      "loss": 0.2627,
      "step": 575
    },
    {
      "epoch": 0.398406374501992,
      "grad_norm": 3.143782377243042,
      "learning_rate": 3.773351829788543e-05,
      "loss": 0.2377,
      "step": 600
    },
    {
      "epoch": 0.4150066401062417,
      "grad_norm": 2.3850388526916504,
      "learning_rate": 3.646580963589486e-05,
      "loss": 0.2221,
      "step": 625
    },
    {
      "epoch": 0.4316069057104914,
      "grad_norm": 3.32765531539917,
      "learning_rate": 3.5159590017513796e-05,
      "loss": 0.2248,
      "step": 650
    },
    {
      "epoch": 0.448207171314741,
      "grad_norm": 3.9372353553771973,
      "learning_rate": 3.381924672705598e-05,
      "loss": 0.2127,
      "step": 675
    },
    {
      "epoch": 0.4648074369189907,
      "grad_norm": 1.9122364521026611,
      "learning_rate": 3.2449281662219556e-05,
      "loss": 0.2034,
      "step": 700
    },
    {
      "epoch": 0.4814077025232404,
      "grad_norm": 2.339261054992676,
      "learning_rate": 3.105429621327208e-05,
      "loss": 0.2036,
      "step": 725
    },
    {
      "epoch": 0.49800796812749004,
      "grad_norm": 2.478360652923584,
      "learning_rate": 2.9638975808063486e-05,
      "loss": 0.2134,
      "step": 750
    },
    {
      "epoch": 0.5146082337317397,
      "grad_norm": 3.1056790351867676,
      "learning_rate": 2.8208074174776444e-05,
      "loss": 0.2316,
      "step": 775
    },
    {
      "epoch": 0.5312084993359893,
      "grad_norm": 1.999221920967102,
      "learning_rate": 2.676639737527213e-05,
      "loss": 0.2325,
      "step": 800
    },
    {
      "epoch": 0.547808764940239,
      "grad_norm": 1.5150188207626343,
      "learning_rate": 2.5318787662659414e-05,
      "loss": 0.1733,
      "step": 825
    },
    {
      "epoch": 0.5644090305444888,
      "grad_norm": 5.388316631317139,
      "learning_rate": 2.3870107217306106e-05,
      "loss": 0.193,
      "step": 850
    },
    {
      "epoch": 0.5810092961487384,
      "grad_norm": 2.7311172485351562,
      "learning_rate": 2.242522181591934e-05,
      "loss": 0.1616,
      "step": 875
    },
    {
      "epoch": 0.5976095617529881,
      "grad_norm": 1.276823878288269,
      "learning_rate": 2.0988984488546643e-05,
      "loss": 0.1526,
      "step": 900
    },
    {
      "epoch": 0.6142098273572377,
      "grad_norm": 1.8383485078811646,
      "learning_rate": 1.956621921838998e-05,
      "loss": 0.1881,
      "step": 925
    },
    {
      "epoch": 0.6308100929614874,
      "grad_norm": 4.960041522979736,
      "learning_rate": 1.8161704739181486e-05,
      "loss": 0.1749,
      "step": 950
    },
    {
      "epoch": 0.647410358565737,
      "grad_norm": 1.369192361831665,
      "learning_rate": 1.6780158484541295e-05,
      "loss": 0.1222,
      "step": 975
    },
    {
      "epoch": 0.6640106241699867,
      "grad_norm": 4.590680122375488,
      "learning_rate": 1.54262207432282e-05,
      "loss": 0.1664,
      "step": 1000
    },
    {
      "epoch": 0.6806108897742363,
      "grad_norm": 1.1486464738845825,
      "learning_rate": 1.4104439073501647e-05,
      "loss": 0.1389,
      "step": 1025
    },
    {
      "epoch": 0.6972111553784861,
      "grad_norm": 1.1831196546554565,
      "learning_rate": 1.2819253028943757e-05,
      "loss": 0.1941,
      "step": 1050
    },
    {
      "epoch": 0.7138114209827358,
      "grad_norm": 1.529694676399231,
      "learning_rate": 1.1574979247043486e-05,
      "loss": 0.1479,
      "step": 1075
    },
    {
      "epoch": 0.7304116865869854,
      "grad_norm": 5.450222492218018,
      "learning_rate": 1.0375796950627137e-05,
      "loss": 0.1561,
      "step": 1100
    },
    {
      "epoch": 0.7470119521912351,
      "grad_norm": 1.9736617803573608,
      "learning_rate": 9.225733910832388e-06,
      "loss": 0.1598,
      "step": 1125
    },
    {
      "epoch": 0.7636122177954847,
      "grad_norm": 4.179745197296143,
      "learning_rate": 8.128652918772865e-06,
      "loss": 0.1353,
      "step": 1150
    },
    {
      "epoch": 0.7802124833997344,
      "grad_norm": 5.7941083908081055,
      "learning_rate": 7.088238811331752e-06,
      "loss": 0.1358,
      "step": 1175
    },
    {
      "epoch": 0.796812749003984,
      "grad_norm": 2.463935136795044,
      "learning_rate": 6.1079860946618685e-06,
      "loss": 0.1337,
      "step": 1200
    },
    {
      "epoch": 0.8134130146082338,
      "grad_norm": 1.1269705295562744,
      "learning_rate": 5.19118720696184e-06,
      "loss": 0.1538,
      "step": 1225
    },
    {
      "epoch": 0.8300132802124834,
      "grad_norm": 5.329429626464844,
      "learning_rate": 4.340921459950962e-06,
      "loss": 0.1323,
      "step": 1250
    },
    {
      "epoch": 0.8466135458167331,
      "grad_norm": 6.623585224151611,
      "learning_rate": 3.56004469618578e-06,
      "loss": 0.1455,
      "step": 1275
    },
    {
      "epoch": 0.8632138114209827,
      "grad_norm": 1.3128626346588135,
      "learning_rate": 2.851179696956993e-06,
      "loss": 0.1367,
      "step": 1300
    },
    {
      "epoch": 0.8798140770252324,
      "grad_norm": 4.968562126159668,
      "learning_rate": 2.216707372984292e-06,
      "loss": 0.0909,
      "step": 1325
    },
    {
      "epoch": 0.896414342629482,
      "grad_norm": 1.0073035955429077,
      "learning_rate": 1.6587587674975424e-06,
      "loss": 0.1443,
      "step": 1350
    },
    {
      "epoch": 0.9130146082337317,
      "grad_norm": 4.145277500152588,
      "learning_rate": 1.1792078985640525e-06,
      "loss": 0.138,
      "step": 1375
    },
    {
      "epoch": 0.9296148738379814,
      "grad_norm": 1.9500765800476074,
      "learning_rate": 7.796654647029e-07,
      "loss": 0.1265,
      "step": 1400
    },
    {
      "epoch": 0.9462151394422311,
      "grad_norm": 1.5362143516540527,
      "learning_rate": 4.614734349276256e-07,
      "loss": 0.1555,
      "step": 1425
    },
    {
      "epoch": 0.9628154050464808,
      "grad_norm": 2.4286270141601562,
      "learning_rate": 2.2570054138813246e-07,
      "loss": 0.1389,
      "step": 1450
    },
    {
      "epoch": 0.9794156706507304,
      "grad_norm": 2.250825881958008,
      "learning_rate": 7.313868975087345e-08,
      "loss": 0.1173,
      "step": 1475
    },
    {
      "epoch": 0.9960159362549801,
      "grad_norm": 0.48711511492729187,
      "learning_rate": 4.3002993740770546e-09,
      "loss": 0.1262,
      "step": 1500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9163208852005532,
      "eval_auc": 0.963556761725483,
      "eval_f1": 0.9162629757785468,
      "eval_loss": 0.32059717178344727,
      "eval_precision": 0.9168975069252078,
      "eval_recall": 0.9156293222683264,
      "eval_runtime": 39.0094,
      "eval_samples_per_second": 148.272,
      "eval_steps_per_second": 2.333,
      "step": 1506
    }
  ],
  "logging_steps": 25,
  "max_steps": 1506,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.01
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6340037689147392.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}