{ |
|
"best_metric": 73.27647476901208, |
|
"best_model_checkpoint": "/scratch/mrahma45/pixel/finetuned_models/canine/bert-base-finetuned-parsing-ud-Chinese-GSD/checkpoint-14000", |
|
"epoch": 120.0, |
|
"global_step": 15000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 4.5737, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 7.948456375838927e-05, |
|
"loss": 2.1245, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 7.894765100671142e-05, |
|
"loss": 1.597, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 7.841073825503357e-05, |
|
"loss": 1.3112, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 7.78738255033557e-05, |
|
"loss": 1.101, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_las": 64.06064913527601, |
|
"eval_loss": 1.3798493146896362, |
|
"eval_runtime": 3.307, |
|
"eval_samples_per_second": 151.193, |
|
"eval_steps_per_second": 19.05, |
|
"eval_uas": 69.56487404248598, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 7.733691275167786e-05, |
|
"loss": 0.8747, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 0.7512, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 7.626308724832216e-05, |
|
"loss": 0.6672, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 7.57261744966443e-05, |
|
"loss": 0.5748, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 7.518926174496645e-05, |
|
"loss": 0.5202, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_las": 68.13551291163232, |
|
"eval_loss": 1.5277836322784424, |
|
"eval_runtime": 3.3863, |
|
"eval_samples_per_second": 147.653, |
|
"eval_steps_per_second": 18.604, |
|
"eval_uas": 73.33965095159125, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 7.46523489932886e-05, |
|
"loss": 0.4395, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 7.411543624161075e-05, |
|
"loss": 0.3923, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 7.35785234899329e-05, |
|
"loss": 0.3607, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 7.304161073825505e-05, |
|
"loss": 0.3235, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"learning_rate": 7.25046979865772e-05, |
|
"loss": 0.3021, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_las": 69.40693358603806, |
|
"eval_loss": 1.897964596748352, |
|
"eval_runtime": 3.2504, |
|
"eval_samples_per_second": 153.829, |
|
"eval_steps_per_second": 19.382, |
|
"eval_uas": 74.69793887704336, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 12.8, |
|
"learning_rate": 7.196778523489934e-05, |
|
"loss": 0.2557, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"learning_rate": 7.143087248322148e-05, |
|
"loss": 0.2355, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 7.089395973154363e-05, |
|
"loss": 0.2258, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"learning_rate": 7.035704697986578e-05, |
|
"loss": 0.2039, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 6.982013422818792e-05, |
|
"loss": 0.1922, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_las": 70.1334596856985, |
|
"eval_loss": 2.0691661834716797, |
|
"eval_runtime": 3.237, |
|
"eval_samples_per_second": 154.463, |
|
"eval_steps_per_second": 19.462, |
|
"eval_uas": 75.53502329621733, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 16.8, |
|
"learning_rate": 6.928322147651007e-05, |
|
"loss": 0.1741, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 6.874630872483222e-05, |
|
"loss": 0.1636, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 6.820939597315437e-05, |
|
"loss": 0.1527, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 6.767248322147652e-05, |
|
"loss": 0.1474, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 6.713557046979866e-05, |
|
"loss": 0.138, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_las": 70.16504777698808, |
|
"eval_loss": 2.318901777267456, |
|
"eval_runtime": 3.2483, |
|
"eval_samples_per_second": 153.926, |
|
"eval_steps_per_second": 19.395, |
|
"eval_uas": 75.54292031903972, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 20.8, |
|
"learning_rate": 6.659865771812081e-05, |
|
"loss": 0.1252, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"learning_rate": 6.606174496644296e-05, |
|
"loss": 0.1177, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"learning_rate": 6.55248322147651e-05, |
|
"loss": 0.1131, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"learning_rate": 6.498791946308724e-05, |
|
"loss": 0.108, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"learning_rate": 6.445100671140939e-05, |
|
"loss": 0.1055, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_las": 70.98633815051726, |
|
"eval_loss": 2.5012738704681396, |
|
"eval_runtime": 3.232, |
|
"eval_samples_per_second": 154.704, |
|
"eval_steps_per_second": 19.493, |
|
"eval_uas": 75.92987443733712, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 24.8, |
|
"learning_rate": 6.391409395973154e-05, |
|
"loss": 0.096, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 25.6, |
|
"learning_rate": 6.337718120805369e-05, |
|
"loss": 0.0935, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 26.4, |
|
"learning_rate": 6.284026845637584e-05, |
|
"loss": 0.0857, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 27.2, |
|
"learning_rate": 6.230335570469799e-05, |
|
"loss": 0.0838, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"learning_rate": 6.176644295302013e-05, |
|
"loss": 0.0798, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_las": 71.54702677090737, |
|
"eval_loss": 2.6194210052490234, |
|
"eval_runtime": 3.2513, |
|
"eval_samples_per_second": 153.784, |
|
"eval_steps_per_second": 19.377, |
|
"eval_uas": 76.27734344152255, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"learning_rate": 6.122953020134228e-05, |
|
"loss": 0.0756, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 29.6, |
|
"learning_rate": 6.069261744966444e-05, |
|
"loss": 0.0724, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 30.4, |
|
"learning_rate": 6.0155704697986585e-05, |
|
"loss": 0.0708, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 31.2, |
|
"learning_rate": 5.9618791946308734e-05, |
|
"loss": 0.0682, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"learning_rate": 5.9081879194630875e-05, |
|
"loss": 0.0646, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"eval_las": 70.67835426044381, |
|
"eval_loss": 2.809868097305298, |
|
"eval_runtime": 3.2209, |
|
"eval_samples_per_second": 155.237, |
|
"eval_steps_per_second": 19.56, |
|
"eval_uas": 75.81141909500118, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 32.8, |
|
"learning_rate": 5.854496644295302e-05, |
|
"loss": 0.0627, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 33.6, |
|
"learning_rate": 5.800805369127517e-05, |
|
"loss": 0.0612, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 34.4, |
|
"learning_rate": 5.747114093959732e-05, |
|
"loss": 0.0569, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 35.2, |
|
"learning_rate": 5.693422818791947e-05, |
|
"loss": 0.0568, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"learning_rate": 5.6397315436241616e-05, |
|
"loss": 0.0535, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"eval_las": 71.18376372107716, |
|
"eval_loss": 2.8807671070098877, |
|
"eval_runtime": 3.2489, |
|
"eval_samples_per_second": 153.901, |
|
"eval_steps_per_second": 19.391, |
|
"eval_uas": 76.31682855563453, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 36.8, |
|
"learning_rate": 5.5860402684563764e-05, |
|
"loss": 0.0503, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 37.6, |
|
"learning_rate": 5.532348993288591e-05, |
|
"loss": 0.0484, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 38.4, |
|
"learning_rate": 5.478657718120806e-05, |
|
"loss": 0.0462, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 39.2, |
|
"learning_rate": 5.424966442953021e-05, |
|
"loss": 0.045, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"learning_rate": 5.371275167785236e-05, |
|
"loss": 0.0465, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"eval_las": 71.76024638711206, |
|
"eval_loss": 3.0303280353546143, |
|
"eval_runtime": 3.2476, |
|
"eval_samples_per_second": 153.959, |
|
"eval_steps_per_second": 19.399, |
|
"eval_uas": 76.87751717602464, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 40.8, |
|
"learning_rate": 5.31758389261745e-05, |
|
"loss": 0.0424, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 41.6, |
|
"learning_rate": 5.263892617449665e-05, |
|
"loss": 0.0411, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 42.4, |
|
"learning_rate": 5.2102013422818795e-05, |
|
"loss": 0.0397, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 43.2, |
|
"learning_rate": 5.1565100671140944e-05, |
|
"loss": 0.0391, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"learning_rate": 5.102818791946309e-05, |
|
"loss": 0.0368, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 44.0, |
|
"eval_las": 71.96556898049435, |
|
"eval_loss": 3.1702797412872314, |
|
"eval_runtime": 3.2488, |
|
"eval_samples_per_second": 153.903, |
|
"eval_steps_per_second": 19.392, |
|
"eval_uas": 77.01176656400537, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 44.8, |
|
"learning_rate": 5.049127516778524e-05, |
|
"loss": 0.0368, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 45.6, |
|
"learning_rate": 4.995436241610739e-05, |
|
"loss": 0.0353, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 46.4, |
|
"learning_rate": 4.941744966442954e-05, |
|
"loss": 0.0348, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 47.2, |
|
"learning_rate": 4.8880536912751685e-05, |
|
"loss": 0.0346, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"learning_rate": 4.834362416107383e-05, |
|
"loss": 0.0313, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"eval_las": 71.61809997630894, |
|
"eval_loss": 3.2243900299072266, |
|
"eval_runtime": 3.2269, |
|
"eval_samples_per_second": 154.949, |
|
"eval_steps_per_second": 19.524, |
|
"eval_uas": 76.7669588565111, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 48.8, |
|
"learning_rate": 4.780671140939598e-05, |
|
"loss": 0.0297, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 49.6, |
|
"learning_rate": 4.726979865771813e-05, |
|
"loss": 0.032, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 50.4, |
|
"learning_rate": 4.673288590604027e-05, |
|
"loss": 0.0302, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 51.2, |
|
"learning_rate": 4.619597315436242e-05, |
|
"loss": 0.0301, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"learning_rate": 4.565906040268457e-05, |
|
"loss": 0.0296, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 52.0, |
|
"eval_las": 71.74445234146727, |
|
"eval_loss": 3.1685025691986084, |
|
"eval_runtime": 3.2336, |
|
"eval_samples_per_second": 154.625, |
|
"eval_steps_per_second": 19.483, |
|
"eval_uas": 76.71957671957672, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 52.8, |
|
"learning_rate": 4.5122147651006716e-05, |
|
"loss": 0.0272, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 53.6, |
|
"learning_rate": 4.4585234899328864e-05, |
|
"loss": 0.0275, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 54.4, |
|
"learning_rate": 4.404832214765101e-05, |
|
"loss": 0.0269, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 55.2, |
|
"learning_rate": 4.351140939597316e-05, |
|
"loss": 0.026, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"learning_rate": 4.297449664429531e-05, |
|
"loss": 0.0262, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 56.0, |
|
"eval_las": 72.02084814025113, |
|
"eval_loss": 3.4257876873016357, |
|
"eval_runtime": 3.2313, |
|
"eval_samples_per_second": 154.737, |
|
"eval_steps_per_second": 19.497, |
|
"eval_uas": 77.12232488351891, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 56.8, |
|
"learning_rate": 4.243758389261746e-05, |
|
"loss": 0.0221, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 57.6, |
|
"learning_rate": 4.1900671140939605e-05, |
|
"loss": 0.021, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 58.4, |
|
"learning_rate": 4.1363758389261754e-05, |
|
"loss": 0.0215, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 59.2, |
|
"learning_rate": 4.0826845637583895e-05, |
|
"loss": 0.0226, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"learning_rate": 4.028993288590604e-05, |
|
"loss": 0.0233, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"eval_las": 71.80762852404644, |
|
"eval_loss": 3.463973045349121, |
|
"eval_runtime": 3.2516, |
|
"eval_samples_per_second": 153.771, |
|
"eval_steps_per_second": 19.375, |
|
"eval_uas": 76.79854694780069, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 60.8, |
|
"learning_rate": 3.975302013422819e-05, |
|
"loss": 0.0213, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 61.6, |
|
"learning_rate": 3.921610738255034e-05, |
|
"loss": 0.02, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 62.4, |
|
"learning_rate": 3.867919463087249e-05, |
|
"loss": 0.0204, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 63.2, |
|
"learning_rate": 3.8142281879194636e-05, |
|
"loss": 0.0197, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"learning_rate": 3.7605369127516784e-05, |
|
"loss": 0.0193, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 64.0, |
|
"eval_las": 71.878701729448, |
|
"eval_loss": 3.574808120727539, |
|
"eval_runtime": 3.2387, |
|
"eval_samples_per_second": 154.383, |
|
"eval_steps_per_second": 19.452, |
|
"eval_uas": 77.03545763247256, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 64.8, |
|
"learning_rate": 3.706845637583893e-05, |
|
"loss": 0.0174, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 65.6, |
|
"learning_rate": 3.653154362416108e-05, |
|
"loss": 0.0183, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 66.4, |
|
"learning_rate": 3.599463087248322e-05, |
|
"loss": 0.0193, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 67.2, |
|
"learning_rate": 3.545771812080537e-05, |
|
"loss": 0.017, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"learning_rate": 3.492080536912752e-05, |
|
"loss": 0.0161, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 68.0, |
|
"eval_las": 71.98136302613915, |
|
"eval_loss": 3.7389907836914062, |
|
"eval_runtime": 3.2198, |
|
"eval_samples_per_second": 155.288, |
|
"eval_steps_per_second": 19.566, |
|
"eval_uas": 76.91700229013662, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 68.8, |
|
"learning_rate": 3.438389261744967e-05, |
|
"loss": 0.0165, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 69.6, |
|
"learning_rate": 3.3846979865771815e-05, |
|
"loss": 0.0157, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 70.4, |
|
"learning_rate": 3.3310067114093964e-05, |
|
"loss": 0.014, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 71.2, |
|
"learning_rate": 3.277315436241611e-05, |
|
"loss": 0.0144, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"learning_rate": 3.223624161073826e-05, |
|
"loss": 0.0149, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 72.0, |
|
"eval_las": 72.36831714443655, |
|
"eval_loss": 3.806819438934326, |
|
"eval_runtime": 3.2367, |
|
"eval_samples_per_second": 154.477, |
|
"eval_steps_per_second": 19.464, |
|
"eval_uas": 77.35133854536839, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 72.8, |
|
"learning_rate": 3.169932885906041e-05, |
|
"loss": 0.0159, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 73.6, |
|
"learning_rate": 3.1162416107382557e-05, |
|
"loss": 0.0126, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 74.4, |
|
"learning_rate": 3.0625503355704705e-05, |
|
"loss": 0.0128, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 75.2, |
|
"learning_rate": 3.0088590604026846e-05, |
|
"loss": 0.0143, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"learning_rate": 2.9551677852348995e-05, |
|
"loss": 0.0121, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"eval_las": 72.39990523572614, |
|
"eval_loss": 3.9205520153045654, |
|
"eval_runtime": 3.2187, |
|
"eval_samples_per_second": 155.341, |
|
"eval_steps_per_second": 19.573, |
|
"eval_uas": 77.27236831714444, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 76.8, |
|
"learning_rate": 2.9014765100671143e-05, |
|
"loss": 0.012, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 77.6, |
|
"learning_rate": 2.847785234899329e-05, |
|
"loss": 0.0124, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 78.4, |
|
"learning_rate": 2.794093959731544e-05, |
|
"loss": 0.0109, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 79.2, |
|
"learning_rate": 2.7404026845637588e-05, |
|
"loss": 0.0112, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"learning_rate": 2.6867114093959732e-05, |
|
"loss": 0.0113, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"eval_las": 72.17878859669904, |
|
"eval_loss": 3.9717860221862793, |
|
"eval_runtime": 3.2195, |
|
"eval_samples_per_second": 155.302, |
|
"eval_steps_per_second": 19.568, |
|
"eval_uas": 77.15391297480849, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 80.8, |
|
"learning_rate": 2.633020134228188e-05, |
|
"loss": 0.0118, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 81.6, |
|
"learning_rate": 2.579328859060403e-05, |
|
"loss": 0.0102, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 82.4, |
|
"learning_rate": 2.5256375838926177e-05, |
|
"loss": 0.0112, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 83.2, |
|
"learning_rate": 2.4719463087248325e-05, |
|
"loss": 0.0109, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"learning_rate": 2.4182550335570474e-05, |
|
"loss": 0.0086, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 84.0, |
|
"eval_las": 72.4709784411277, |
|
"eval_loss": 4.107804775238037, |
|
"eval_runtime": 3.2109, |
|
"eval_samples_per_second": 155.72, |
|
"eval_steps_per_second": 19.621, |
|
"eval_uas": 77.41451472794756, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 84.8, |
|
"learning_rate": 2.364563758389262e-05, |
|
"loss": 0.0094, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 85.6, |
|
"learning_rate": 2.3108724832214767e-05, |
|
"loss": 0.009, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 86.4, |
|
"learning_rate": 2.2571812080536915e-05, |
|
"loss": 0.0078, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 87.2, |
|
"learning_rate": 2.2034899328859063e-05, |
|
"loss": 0.0082, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"learning_rate": 2.149798657718121e-05, |
|
"loss": 0.0091, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 88.0, |
|
"eval_las": 72.34462607596936, |
|
"eval_loss": 4.17307710647583, |
|
"eval_runtime": 3.2189, |
|
"eval_samples_per_second": 155.332, |
|
"eval_steps_per_second": 19.572, |
|
"eval_uas": 77.18550106609808, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 88.8, |
|
"learning_rate": 2.0961073825503356e-05, |
|
"loss": 0.008, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 89.6, |
|
"learning_rate": 2.0424161073825505e-05, |
|
"loss": 0.0079, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 90.4, |
|
"learning_rate": 1.9887248322147653e-05, |
|
"loss": 0.0073, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 91.2, |
|
"learning_rate": 1.93503355704698e-05, |
|
"loss": 0.0075, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"learning_rate": 1.881342281879195e-05, |
|
"loss": 0.0072, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 92.0, |
|
"eval_las": 72.58153676064124, |
|
"eval_loss": 4.29336404800415, |
|
"eval_runtime": 3.2461, |
|
"eval_samples_per_second": 154.032, |
|
"eval_steps_per_second": 19.408, |
|
"eval_uas": 77.58824923004028, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 92.8, |
|
"learning_rate": 1.8276510067114094e-05, |
|
"loss": 0.0066, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 93.6, |
|
"learning_rate": 1.7739597315436242e-05, |
|
"loss": 0.0065, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 94.4, |
|
"learning_rate": 1.720268456375839e-05, |
|
"loss": 0.0068, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 95.2, |
|
"learning_rate": 1.666577181208054e-05, |
|
"loss": 0.0063, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"learning_rate": 1.6128859060402687e-05, |
|
"loss": 0.0058, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 96.0, |
|
"eval_las": 72.78685935402353, |
|
"eval_loss": 4.2894287109375, |
|
"eval_runtime": 3.1913, |
|
"eval_samples_per_second": 156.677, |
|
"eval_steps_per_second": 19.741, |
|
"eval_uas": 77.70670457237621, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 96.8, |
|
"learning_rate": 1.5591946308724832e-05, |
|
"loss": 0.0053, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 97.6, |
|
"learning_rate": 1.505503355704698e-05, |
|
"loss": 0.0058, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 98.4, |
|
"learning_rate": 1.4518120805369128e-05, |
|
"loss": 0.0055, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 99.2, |
|
"learning_rate": 1.3981208053691275e-05, |
|
"loss": 0.0061, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"learning_rate": 1.3444295302013423e-05, |
|
"loss": 0.0058, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"eval_las": 72.82634446813552, |
|
"eval_loss": 4.251532554626465, |
|
"eval_runtime": 3.1899, |
|
"eval_samples_per_second": 156.746, |
|
"eval_steps_per_second": 19.75, |
|
"eval_uas": 77.7461896864882, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 100.8, |
|
"learning_rate": 1.2912751677852352e-05, |
|
"loss": 0.0048, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 101.6, |
|
"learning_rate": 1.2375838926174497e-05, |
|
"loss": 0.0044, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 102.4, |
|
"learning_rate": 1.1838926174496645e-05, |
|
"loss": 0.0054, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 103.2, |
|
"learning_rate": 1.1302013422818795e-05, |
|
"loss": 0.0048, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 104.0, |
|
"learning_rate": 1.076510067114094e-05, |
|
"loss": 0.0052, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 104.0, |
|
"eval_las": 72.66050698886521, |
|
"eval_loss": 4.392012596130371, |
|
"eval_runtime": 3.19, |
|
"eval_samples_per_second": 156.741, |
|
"eval_steps_per_second": 19.749, |
|
"eval_uas": 77.46979388770433, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 104.8, |
|
"learning_rate": 1.0228187919463088e-05, |
|
"loss": 0.005, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 105.6, |
|
"learning_rate": 9.691275167785236e-06, |
|
"loss": 0.0043, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 106.4, |
|
"learning_rate": 9.154362416107383e-06, |
|
"loss": 0.0039, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 107.2, |
|
"learning_rate": 8.617449664429531e-06, |
|
"loss": 0.0035, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 108.0, |
|
"learning_rate": 8.085906040268458e-06, |
|
"loss": 0.0046, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 108.0, |
|
"eval_las": 72.97638790176103, |
|
"eval_loss": 4.4607110023498535, |
|
"eval_runtime": 3.1889, |
|
"eval_samples_per_second": 156.795, |
|
"eval_steps_per_second": 19.756, |
|
"eval_uas": 77.83305693753455, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 108.8, |
|
"learning_rate": 7.548993288590605e-06, |
|
"loss": 0.0038, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 109.6, |
|
"learning_rate": 7.012080536912752e-06, |
|
"loss": 0.0039, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 110.4, |
|
"learning_rate": 6.475167785234899e-06, |
|
"loss": 0.0034, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 111.2, |
|
"learning_rate": 5.938255033557048e-06, |
|
"loss": 0.0034, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 112.0, |
|
"learning_rate": 5.401342281879195e-06, |
|
"loss": 0.003, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 112.0, |
|
"eval_las": 73.27647476901208, |
|
"eval_loss": 4.440804481506348, |
|
"eval_runtime": 3.1908, |
|
"eval_samples_per_second": 156.703, |
|
"eval_steps_per_second": 19.745, |
|
"eval_uas": 77.99889441680486, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 112.8, |
|
"learning_rate": 4.864429530201343e-06, |
|
"loss": 0.0035, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 113.6, |
|
"learning_rate": 4.32751677852349e-06, |
|
"loss": 0.0031, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 114.4, |
|
"learning_rate": 3.790604026845638e-06, |
|
"loss": 0.0029, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 115.2, |
|
"learning_rate": 3.253691275167786e-06, |
|
"loss": 0.0035, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 116.0, |
|
"learning_rate": 2.716778523489933e-06, |
|
"loss": 0.0027, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 116.0, |
|
"eval_las": 73.2448866777225, |
|
"eval_loss": 4.51676607131958, |
|
"eval_runtime": 3.1897, |
|
"eval_samples_per_second": 156.756, |
|
"eval_steps_per_second": 19.751, |
|
"eval_uas": 77.91202716575852, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 116.8, |
|
"learning_rate": 2.1798657718120807e-06, |
|
"loss": 0.0026, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 117.6, |
|
"learning_rate": 1.6429530201342283e-06, |
|
"loss": 0.0028, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 118.4, |
|
"learning_rate": 1.1060402684563759e-06, |
|
"loss": 0.003, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 119.2, |
|
"learning_rate": 5.691275167785235e-07, |
|
"loss": 0.0035, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 120.0, |
|
"learning_rate": 3.2214765100671145e-08, |
|
"loss": 0.0032, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 120.0, |
|
"eval_las": 73.11853431256417, |
|
"eval_loss": 4.538000583648682, |
|
"eval_runtime": 3.1945, |
|
"eval_samples_per_second": 156.52, |
|
"eval_steps_per_second": 19.721, |
|
"eval_uas": 77.84095396035694, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 120.0, |
|
"step": 15000, |
|
"total_flos": 9.575030782291968e+16, |
|
"train_loss": 0.140908819069465, |
|
"train_runtime": 6658.0803, |
|
"train_samples_per_second": 72.093, |
|
"train_steps_per_second": 2.253 |
|
} |
|
], |
|
"max_steps": 15000, |
|
"num_train_epochs": 120, |
|
"total_flos": 9.575030782291968e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |