{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 1.0367,
      "step": 1
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.142857142857143e-06,
      "loss": 1.098,
      "step": 5
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.9615,
      "step": 10
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.8549,
      "step": 15
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.7821,
      "step": 20
    },
    {
      "epoch": 0.44,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.7201,
      "step": 25
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.7456,
      "step": 30
    },
    {
      "epoch": 0.61,
      "learning_rate": 5e-05,
      "loss": 0.7142,
      "step": 35
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.998922515567496e-05,
      "loss": 0.7209,
      "step": 40
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.995690991048146e-05,
      "loss": 0.6287,
      "step": 45
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.99030821197584e-05,
      "loss": 0.5931,
      "step": 50
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.982778818239101e-05,
      "loss": 0.3712,
      "step": 55
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8771697070204532,
      "eval_loss": 0.31964054703712463,
      "eval_runtime": 0.7969,
      "eval_samples_per_second": 126.74,
      "eval_steps_per_second": 16.313,
      "step": 57
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.97310930008156e-05,
      "loss": 0.2417,
      "step": 60
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.961307992507443e-05,
      "loss": 0.479,
      "step": 65
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.947385068096907e-05,
      "loss": 0.3856,
      "step": 70
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.9313525282373974e-05,
      "loss": 0.4473,
      "step": 75
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.9132241927786035e-05,
      "loss": 0.3996,
      "step": 80
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.893015688119921e-05,
      "loss": 0.3493,
      "step": 85
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.870744433740688e-05,
      "loss": 0.2517,
      "step": 90
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.8464296271848155e-05,
      "loss": 0.2712,
      "step": 95
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.8200922275127355e-05,
      "loss": 0.1581,
      "step": 100
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.7917549372349616e-05,
      "loss": 0.4589,
      "step": 105
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.761442182742799e-05,
      "loss": 0.1617,
      "step": 110
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.892094017094017,
      "eval_loss": 0.29778197407722473,
      "eval_runtime": 0.4586,
      "eval_samples_per_second": 220.238,
      "eval_steps_per_second": 28.347,
      "step": 114
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.7291800932531064e-05,
      "loss": 0.327,
      "step": 115
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.694996478285231e-05,
      "loss": 0.1373,
      "step": 120
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.6589208036895535e-05,
      "loss": 0.1583,
      "step": 125
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.620984166248288e-05,
      "loss": 0.1642,
      "step": 130
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.581219266870446e-05,
      "loss": 0.126,
      "step": 135
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.53966038240406e-05,
      "loss": 0.1036,
      "step": 140
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.496343336089965e-05,
      "loss": 0.0361,
      "step": 145
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.4513054666826146e-05,
      "loss": 0.1544,
      "step": 150
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.4045855962645363e-05,
      "loss": 0.0069,
      "step": 155
    },
    {
      "epoch": 2.81,
      "learning_rate": 4.3562239967821805e-05,
      "loss": 0.1332,
      "step": 160
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.306262355332006e-05,
      "loss": 0.4542,
      "step": 165
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.254743738226721e-05,
      "loss": 0.2084,
      "step": 170
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.95822994210091,
      "eval_loss": 0.19439974427223206,
      "eval_runtime": 1.7156,
      "eval_samples_per_second": 58.872,
      "eval_steps_per_second": 7.578,
      "step": 171
    },
    {
      "epoch": 3.07,
      "learning_rate": 4.201712553872658e-05,
      "loss": 0.0252,
      "step": 175
    },
    {
      "epoch": 3.16,
      "learning_rate": 4.147214514490278e-05,
      "loss": 0.0166,
      "step": 180
    },
    {
      "epoch": 3.25,
      "learning_rate": 4.0912965967108125e-05,
      "loss": 0.0085,
      "step": 185
    },
    {
      "epoch": 3.33,
      "learning_rate": 4.034007001082985e-05,
      "loss": 0.0824,
      "step": 190
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.975395110524742e-05,
      "loss": 0.1629,
      "step": 195
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.9155114477557933e-05,
      "loss": 0.0485,
      "step": 200
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.854407631747654e-05,
      "loss": 0.0384,
      "step": 205
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.792136333228735e-05,
      "loss": 0.1495,
      "step": 210
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.728751229282836e-05,
      "loss": 0.0615,
      "step": 215
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.664306957080159e-05,
      "loss": 0.2062,
      "step": 220
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.598859066780754e-05,
      "loss": 0.1497,
      "step": 225
    },
    {
      "epoch": 4.0,
      "eval_f1": 0.93671679197995,
      "eval_loss": 0.2901856303215027,
      "eval_runtime": 0.4385,
      "eval_samples_per_second": 230.348,
      "eval_steps_per_second": 29.649,
      "step": 228
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.5324639736509714e-05,
      "loss": 0.0948,
      "step": 230
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.4651789094342044e-05,
      "loss": 0.0662,
      "step": 235
    },
    {
      "epoch": 4.21,
      "learning_rate": 3.39706187301784e-05,
      "loss": 0.1637,
      "step": 240
    },
    {
      "epoch": 4.3,
      "learning_rate": 3.3281715804389403e-05,
      "loss": 0.0159,
      "step": 245
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.258567414271748e-05,
      "loss": 0.0065,
      "step": 250
    },
    {
      "epoch": 4.47,
      "learning_rate": 3.18830937244065e-05,
      "loss": 0.0039,
      "step": 255
    },
    {
      "epoch": 4.56,
      "learning_rate": 3.117458016502711e-05,
      "loss": 0.0063,
      "step": 260
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.046074419444366e-05,
      "loss": 0.0055,
      "step": 265
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.9742201130372693e-05,
      "loss": 0.002,
      "step": 270
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.901957034798671e-05,
      "loss": 0.003,
      "step": 275
    },
    {
      "epoch": 4.91,
      "learning_rate": 2.8293474746020472e-05,
      "loss": 0.0605,
      "step": 280
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.756454020984009e-05,
      "loss": 0.2006,
      "step": 285
    },
    {
      "epoch": 5.0,
      "eval_f1": 0.9475324675324674,
      "eval_loss": 0.3412802219390869,
      "eval_runtime": 0.4664,
      "eval_samples_per_second": 216.553,
      "eval_steps_per_second": 27.873,
      "step": 285
    },
    {
      "epoch": 5.09,
      "learning_rate": 2.68333950719376e-05,
      "loss": 0.0644,
      "step": 290
    },
    {
      "epoch": 5.18,
      "learning_rate": 2.6100669570316195e-05,
      "loss": 0.0029,
      "step": 295
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.5366995305232916e-05,
      "loss": 0.002,
      "step": 300
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.463300469476709e-05,
      "loss": 0.0027,
      "step": 305
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.3899330429683807e-05,
      "loss": 0.1243,
      "step": 310
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.3166604928062406e-05,
      "loss": 0.0174,
      "step": 315
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.243545979015992e-05,
      "loss": 0.0337,
      "step": 320
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.1706525253979534e-05,
      "loss": 0.0016,
      "step": 325
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.0980429652013297e-05,
      "loss": 0.0015,
      "step": 330
    },
    {
      "epoch": 5.88,
      "learning_rate": 2.025779886962731e-05,
      "loss": 0.0014,
      "step": 335
    },
    {
      "epoch": 5.96,
      "learning_rate": 1.9539255805556344e-05,
      "loss": 0.0147,
      "step": 340
    },
    {
      "epoch": 6.0,
      "eval_f1": 0.9146959459459459,
      "eval_loss": 0.4439590275287628,
      "eval_runtime": 0.4644,
      "eval_samples_per_second": 217.491,
      "eval_steps_per_second": 27.994,
      "step": 342
    },
    {
      "epoch": 6.05,
      "learning_rate": 1.8825419834972902e-05,
      "loss": 0.0012,
      "step": 345
    },
    {
      "epoch": 6.14,
      "learning_rate": 1.811690627559351e-05,
      "loss": 0.0696,
      "step": 350
    },
    {
      "epoch": 6.23,
      "learning_rate": 1.7414325857282526e-05,
      "loss": 0.0012,
      "step": 355
    },
    {
      "epoch": 6.32,
      "learning_rate": 1.6718284195610606e-05,
      "loss": 0.0233,
      "step": 360
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.6029381269821604e-05,
      "loss": 0.0205,
      "step": 365
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.534821090565796e-05,
      "loss": 0.001,
      "step": 370
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.4675360263490295e-05,
      "loss": 0.002,
      "step": 375
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.4011409332192472e-05,
      "loss": 0.1466,
      "step": 380
    },
    {
      "epoch": 6.75,
      "learning_rate": 1.335693042919841e-05,
      "loss": 0.001,
      "step": 385
    },
    {
      "epoch": 6.84,
      "learning_rate": 1.2712487707171645e-05,
      "loss": 0.0012,
      "step": 390
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.2078636667712649e-05,
      "loss": 0.0011,
      "step": 395
    },
    {
      "epoch": 7.0,
      "eval_f1": 0.9475324675324674,
      "eval_loss": 0.3331538140773773,
      "eval_runtime": 0.4482,
      "eval_samples_per_second": 225.335,
      "eval_steps_per_second": 29.003,
      "step": 399
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.1455923682523475e-05,
      "loss": 0.0009,
      "step": 400
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.0844885522442074e-05,
      "loss": 0.0013,
      "step": 405
    },
    {
      "epoch": 7.19,
      "learning_rate": 1.0246048894752589e-05,
      "loss": 0.0009,
      "step": 410
    },
    {
      "epoch": 7.28,
      "learning_rate": 9.659929989170154e-06,
      "loss": 0.001,
      "step": 415
    },
    {
      "epoch": 7.37,
      "learning_rate": 9.087034032891883e-06,
      "loss": 0.001,
      "step": 420
    },
    {
      "epoch": 7.46,
      "learning_rate": 8.527854855097225e-06,
      "loss": 0.0008,
      "step": 425
    },
    {
      "epoch": 7.54,
      "learning_rate": 7.982874461273438e-06,
      "loss": 0.0008,
      "step": 430
    },
    {
      "epoch": 7.63,
      "learning_rate": 7.452562617732794e-06,
      "loss": 0.0009,
      "step": 435
    },
    {
      "epoch": 7.72,
      "learning_rate": 6.93737644667995e-06,
      "loss": 0.0006,
      "step": 440
    },
    {
      "epoch": 7.81,
      "learning_rate": 6.4377600321782e-06,
      "loss": 0.0007,
      "step": 445
    },
    {
      "epoch": 7.89,
      "learning_rate": 5.954144037354645e-06,
      "loss": 0.0007,
      "step": 450
    },
    {
      "epoch": 7.98,
      "learning_rate": 5.486945333173851e-06,
      "loss": 0.0008,
      "step": 455
    },
    {
      "epoch": 8.0,
      "eval_f1": 0.9257742782152232,
      "eval_loss": 0.4345126152038574,
      "eval_runtime": 0.4442,
      "eval_samples_per_second": 227.385,
      "eval_steps_per_second": 29.267,
      "step": 456
    },
    {
      "epoch": 8.07,
      "learning_rate": 5.036566639100351e-06,
      "loss": 0.0006,
      "step": 460
    },
    {
      "epoch": 8.16,
      "learning_rate": 4.603396175959404e-06,
      "loss": 0.0006,
      "step": 465
    },
    {
      "epoch": 8.25,
      "learning_rate": 4.187807331295549e-06,
      "loss": 0.0006,
      "step": 470
    },
    {
      "epoch": 8.33,
      "learning_rate": 3.7901583375171273e-06,
      "loss": 0.0069,
      "step": 475
    },
    {
      "epoch": 8.42,
      "learning_rate": 3.4107919631044732e-06,
      "loss": 0.0006,
      "step": 480
    },
    {
      "epoch": 8.51,
      "learning_rate": 3.0500352171476897e-06,
      "loss": 0.0006,
      "step": 485
    },
    {
      "epoch": 8.6,
      "learning_rate": 2.708199067468939e-06,
      "loss": 0.0007,
      "step": 490
    },
    {
      "epoch": 8.68,
      "learning_rate": 2.385578172572009e-06,
      "loss": 0.0006,
      "step": 495
    },
    {
      "epoch": 8.77,
      "learning_rate": 2.0824506276503897e-06,
      "loss": 0.0006,
      "step": 500
    },
    {
      "epoch": 8.86,
      "learning_rate": 1.7990777248726442e-06,
      "loss": 0.0005,
      "step": 505
    },
    {
      "epoch": 8.95,
      "learning_rate": 1.5357037281518522e-06,
      "loss": 0.0005,
      "step": 510
    },
    {
      "epoch": 9.0,
      "eval_f1": 0.93671679197995,
      "eval_loss": 0.4083709418773651,
      "eval_runtime": 0.4634,
      "eval_samples_per_second": 217.946,
      "eval_steps_per_second": 28.053,
      "step": 513
    },
    {
      "epoch": 9.04,
      "learning_rate": 1.2925556625931173e-06,
      "loss": 0.0006,
      "step": 515
    },
    {
      "epoch": 9.12,
      "learning_rate": 1.0698431188007952e-06,
      "loss": 0.0006,
      "step": 520
    },
    {
      "epoch": 9.21,
      "learning_rate": 8.677580722139672e-07,
      "loss": 0.0005,
      "step": 525
    },
    {
      "epoch": 9.3,
      "learning_rate": 6.864747176260289e-07,
      "loss": 0.0006,
      "step": 530
    },
    {
      "epoch": 9.39,
      "learning_rate": 5.261493190309303e-07,
      "loss": 0.0006,
      "step": 535
    },
    {
      "epoch": 9.47,
      "learning_rate": 3.8692007492557024e-07,
      "loss": 0.0005,
      "step": 540
    },
    {
      "epoch": 9.56,
      "learning_rate": 2.6890699918440676e-07,
      "loss": 0.0005,
      "step": 545
    },
    {
      "epoch": 9.65,
      "learning_rate": 1.7221181760899152e-07,
      "loss": 0.0005,
      "step": 550
    },
    {
      "epoch": 9.74,
      "learning_rate": 9.691788024160376e-08,
      "loss": 0.0005,
      "step": 555
    },
    {
      "epoch": 9.82,
      "learning_rate": 4.3090089518540987e-08,
      "loss": 0.0006,
      "step": 560
    },
    {
      "epoch": 9.91,
      "learning_rate": 1.0774844325039946e-08,
      "loss": 0.0005,
      "step": 565
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0,
      "loss": 0.0005,
      "step": 570
    },
    {
      "epoch": 10.0,
      "eval_f1": 0.93671679197995,
      "eval_loss": 0.41272762417793274,
      "eval_runtime": 1.6882,
      "eval_samples_per_second": 59.826,
      "eval_steps_per_second": 7.7,
      "step": 570
    },
    {
      "epoch": 10.0,
      "step": 570,
      "total_flos": 197328896575272.0,
      "train_loss": 0.13977860778839768,
      "train_runtime": 198.4818,
      "train_samples_per_second": 45.747,
      "train_steps_per_second": 2.872
    }
  ],
  "max_steps": 570,
  "num_train_epochs": 10,
  "total_flos": 197328896575272.0,
  "trial_name": null,
  "trial_params": null
}