|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 5570, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 8.968609865470853e-08, |
|
"loss": 2.0239, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.484304932735426e-07, |
|
"loss": 1.9842, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.968609865470852e-07, |
|
"loss": 1.9791, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.345291479820628e-06, |
|
"loss": 1.9583, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7937219730941704e-06, |
|
"loss": 1.9019, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.242152466367713e-06, |
|
"loss": 1.8031, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.690582959641256e-06, |
|
"loss": 1.7329, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.1390134529147986e-06, |
|
"loss": 1.6897, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.587443946188341e-06, |
|
"loss": 1.6468, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.0358744394618836e-06, |
|
"loss": 1.6347, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.484304932735426e-06, |
|
"loss": 1.5639, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.932735426008969e-06, |
|
"loss": 1.5379, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.381165919282512e-06, |
|
"loss": 1.5118, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.8295964125560544e-06, |
|
"loss": 1.484, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.278026905829597e-06, |
|
"loss": 1.4948, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.72645739910314e-06, |
|
"loss": 1.4806, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.174887892376682e-06, |
|
"loss": 1.4396, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.6233183856502244e-06, |
|
"loss": 1.4329, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.071748878923767e-06, |
|
"loss": 1.42, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.52017937219731e-06, |
|
"loss": 1.4031, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.968609865470853e-06, |
|
"loss": 1.4101, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.417040358744395e-06, |
|
"loss": 1.3792, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.865470852017938e-06, |
|
"loss": 1.4017, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.0313901345291482e-05, |
|
"loss": 1.3627, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.0762331838565023e-05, |
|
"loss": 1.3775, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.1210762331838564e-05, |
|
"loss": 1.3622, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.1659192825112109e-05, |
|
"loss": 1.3488, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.210762331838565e-05, |
|
"loss": 1.375, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.2556053811659194e-05, |
|
"loss": 1.363, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.3004484304932737e-05, |
|
"loss": 1.3265, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.345291479820628e-05, |
|
"loss": 1.3403, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.3901345291479822e-05, |
|
"loss": 1.343, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.4349775784753363e-05, |
|
"loss": 1.3238, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.4798206278026908e-05, |
|
"loss": 1.3275, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.5246636771300449e-05, |
|
"loss": 1.2888, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.569506726457399e-05, |
|
"loss": 1.2978, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.6143497757847534e-05, |
|
"loss": 1.2675, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.6591928251121077e-05, |
|
"loss": 1.302, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.704035874439462e-05, |
|
"loss": 1.2428, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7488789237668162e-05, |
|
"loss": 1.2886, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7937219730941705e-05, |
|
"loss": 1.2717, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.8385650224215248e-05, |
|
"loss": 1.2753, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.883408071748879e-05, |
|
"loss": 1.2653, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9282511210762333e-05, |
|
"loss": 1.2403, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9730941704035876e-05, |
|
"loss": 1.2646, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9999950946779145e-05, |
|
"loss": 1.2439, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9999399103571276e-05, |
|
"loss": 1.2371, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.999823413457922e-05, |
|
"loss": 1.2457, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.999645611123453e-05, |
|
"loss": 1.223, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.999406514255894e-05, |
|
"loss": 1.2375, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9991061375157722e-05, |
|
"loss": 1.2303, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9987444993210666e-05, |
|
"loss": 1.2654, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.99832162184608e-05, |
|
"loss": 1.2186, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.997837531020079e-05, |
|
"loss": 1.2295, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.997292256525703e-05, |
|
"loss": 1.2621, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.996685831797147e-05, |
|
"loss": 1.2542, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.996018294018109e-05, |
|
"loss": 1.2517, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.995289684119511e-05, |
|
"loss": 1.2157, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.994500046776989e-05, |
|
"loss": 1.2732, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9936494304081536e-05, |
|
"loss": 1.2268, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9927378871696224e-05, |
|
"loss": 1.2414, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9917654729538193e-05, |
|
"loss": 1.2552, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9907322473855508e-05, |
|
"loss": 1.2284, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.989638273818347e-05, |
|
"loss": 1.205, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9884836193305788e-05, |
|
"loss": 1.2287, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9872683547213446e-05, |
|
"loss": 1.2228, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9859925545061288e-05, |
|
"loss": 1.21, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.984656296912232e-05, |
|
"loss": 1.2205, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9832596638739766e-05, |
|
"loss": 1.2289, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9818027410276813e-05, |
|
"loss": 1.1928, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9802856177064105e-05, |
|
"loss": 1.1985, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.978708386934496e-05, |
|
"loss": 1.2002, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9770711454218345e-05, |
|
"loss": 1.1955, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9753739935579576e-05, |
|
"loss": 1.223, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9736170354058746e-05, |
|
"loss": 1.2069, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9718003786956937e-05, |
|
"loss": 1.1913, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9699241348180155e-05, |
|
"loss": 1.2101, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9679884188171023e-05, |
|
"loss": 1.1892, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.965993349383826e-05, |
|
"loss": 1.185, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9639390488483873e-05, |
|
"loss": 1.2346, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.961825643172819e-05, |
|
"loss": 1.1946, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.959653261943259e-05, |
|
"loss": 1.2074, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9574220383620054e-05, |
|
"loss": 1.2111, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9551321092393505e-05, |
|
"loss": 1.1687, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9527836149851897e-05, |
|
"loss": 1.1676, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9503766996004143e-05, |
|
"loss": 1.2078, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9479115106680807e-05, |
|
"loss": 1.1775, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9453881993443624e-05, |
|
"loss": 1.2089, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9428069203492784e-05, |
|
"loss": 1.1762, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9401678319572115e-05, |
|
"loss": 1.1667, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9374710959871986e-05, |
|
"loss": 1.1833, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9347168777930118e-05, |
|
"loss": 1.1816, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.931905346253017e-05, |
|
"loss": 1.2159, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9290366737598226e-05, |
|
"loss": 1.2015, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9261110362097046e-05, |
|
"loss": 1.1925, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9231286129918244e-05, |
|
"loss": 1.2161, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9200895869772273e-05, |
|
"loss": 1.2061, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9169941445076325e-05, |
|
"loss": 1.1669, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9138424753840036e-05, |
|
"loss": 1.1644, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9106347728549134e-05, |
|
"loss": 1.142, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.907371233604693e-05, |
|
"loss": 1.163, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9040520577413734e-05, |
|
"loss": 1.1706, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9006774487844145e-05, |
|
"loss": 1.1712, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.897247613652226e-05, |
|
"loss": 1.1564, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8937627626494804e-05, |
|
"loss": 1.1617, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.890223109454218e-05, |
|
"loss": 1.1242, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8866288711047452e-05, |
|
"loss": 1.1557, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8829802679863246e-05, |
|
"loss": 1.1654, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8792775238176655e-05, |
|
"loss": 1.1622, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8755208656372016e-05, |
|
"loss": 1.192, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.871710523789174e-05, |
|
"loss": 1.1524, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8678467319095045e-05, |
|
"loss": 1.1752, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8639297269114725e-05, |
|
"loss": 1.1476, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.859959748971186e-05, |
|
"loss": 1.1557, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8559370415128562e-05, |
|
"loss": 1.175, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.851861851193871e-05, |
|
"loss": 1.1588, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8477344278896708e-05, |
|
"loss": 1.1646, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.843555024678429e-05, |
|
"loss": 1.1518, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.839323897825531e-05, |
|
"loss": 1.1741, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8350413067678636e-05, |
|
"loss": 1.1663, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8307075140979057e-05, |
|
"loss": 1.1659, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8263227855476275e-05, |
|
"loss": 1.1646, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.821887389972198e-05, |
|
"loss": 1.1876, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.817401599333499e-05, |
|
"loss": 1.1587, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.812865688683447e-05, |
|
"loss": 1.1564, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.808279936147133e-05, |
|
"loss": 1.1344, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.8036446229057653e-05, |
|
"loss": 1.1542, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.798960033179428e-05, |
|
"loss": 1.1723, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.7942264542096574e-05, |
|
"loss": 1.1705, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7894441762418254e-05, |
|
"loss": 1.1291, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7846134925073452e-05, |
|
"loss": 1.1621, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7797346992056913e-05, |
|
"loss": 1.174, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.774808095486235e-05, |
|
"loss": 1.1611, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7698339834299064e-05, |
|
"loss": 1.1515, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7648126680306674e-05, |
|
"loss": 1.1548, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.7597444571768133e-05, |
|
"loss": 1.1478, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.754629661632093e-05, |
|
"loss": 1.1534, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7494685950166547e-05, |
|
"loss": 1.1537, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.744261573787816e-05, |
|
"loss": 1.152, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7390089172206594e-05, |
|
"loss": 1.1377, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7337109473884558e-05, |
|
"loss": 1.1366, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.728367989142916e-05, |
|
"loss": 1.1217, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.722980370094273e-05, |
|
"loss": 1.142, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7175484205911922e-05, |
|
"loss": 1.1475, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7120724737005176e-05, |
|
"loss": 1.1322, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.706552865186848e-05, |
|
"loss": 1.1392, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7009899334919506e-05, |
|
"loss": 1.1519, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.695384019714007e-05, |
|
"loss": 1.1396, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.689735467586701e-05, |
|
"loss": 1.1561, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1560195684432983, |
|
"eval_runtime": 298.5995, |
|
"eval_samples_per_second": 13.419, |
|
"eval_steps_per_second": 0.211, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.909651722005937e-05, |
|
"loss": 1.0557, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9074695017537226e-05, |
|
"loss": 1.0677, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9052625184621195e-05, |
|
"loss": 1.0752, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9030308323553167e-05, |
|
"loss": 1.0783, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.9007745043315927e-05, |
|
"loss": 1.0809, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.898493595961656e-05, |
|
"loss": 1.065, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8961881694869628e-05, |
|
"loss": 1.0763, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8938582878180208e-05, |
|
"loss": 1.0592, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8915040145326707e-05, |
|
"loss": 1.0468, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8891254138743516e-05, |
|
"loss": 1.0455, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8867225507503497e-05, |
|
"loss": 1.0601, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8842954907300236e-05, |
|
"loss": 1.0993, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.881844300043019e-05, |
|
"loss": 1.0603, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.879369045577459e-05, |
|
"loss": 1.0636, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.876869794878118e-05, |
|
"loss": 1.1158, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8743466161445823e-05, |
|
"loss": 1.0916, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.871799578229385e-05, |
|
"loss": 1.041, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.869228750636129e-05, |
|
"loss": 1.0784, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8666342035175912e-05, |
|
"loss": 1.0706, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8640160076738063e-05, |
|
"loss": 1.0649, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.861374234550136e-05, |
|
"loss": 1.0736, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.8587089562353197e-05, |
|
"loss": 1.0509, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8560202454595052e-05, |
|
"loss": 1.0961, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8533081755922673e-05, |
|
"loss": 1.0487, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.850572820640603e-05, |
|
"loss": 1.0896, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8478142552469135e-05, |
|
"loss": 1.0771, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8450325546869662e-05, |
|
"loss": 1.0476, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.842227794867842e-05, |
|
"loss": 1.0773, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.839400052325863e-05, |
|
"loss": 1.0997, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8365494042245032e-05, |
|
"loss": 1.0832, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.833675928352285e-05, |
|
"loss": 1.07, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8307797031206543e-05, |
|
"loss": 1.0794, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8278608075618433e-05, |
|
"loss": 1.0934, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.8249193213267103e-05, |
|
"loss": 1.0513, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.82195532468257e-05, |
|
"loss": 1.12, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8189688985110007e-05, |
|
"loss": 1.0765, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.815960124305638e-05, |
|
"loss": 1.0554, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8129290841699508e-05, |
|
"loss": 1.0726, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8098758608150017e-05, |
|
"loss": 1.0272, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.806800537557188e-05, |
|
"loss": 1.0755, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.8037031983159702e-05, |
|
"loss": 1.0735, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.8005839276115817e-05, |
|
"loss": 1.0892, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.797442810562721e-05, |
|
"loss": 1.0842, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.7942799328842308e-05, |
|
"loss": 1.0767, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.791095380884757e-05, |
|
"loss": 1.0623, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7878892414643963e-05, |
|
"loss": 1.0484, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7846616021123216e-05, |
|
"loss": 1.0691, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.781412550904397e-05, |
|
"loss": 1.0957, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.778142176500774e-05, |
|
"loss": 1.0709, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.77485056814347e-05, |
|
"loss": 1.0784, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.7715378156539376e-05, |
|
"loss": 1.0557, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7682040094306077e-05, |
|
"loss": 1.0809, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7648492404464284e-05, |
|
"loss": 1.0706, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.761473600246378e-05, |
|
"loss": 1.0717, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.7580771809449696e-05, |
|
"loss": 1.0617, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.754660075223737e-05, |
|
"loss": 1.0623, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.751222376328704e-05, |
|
"loss": 1.0665, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7477641780678435e-05, |
|
"loss": 1.064, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7442855748085125e-05, |
|
"loss": 1.0715, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7407866614748825e-05, |
|
"loss": 1.0547, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7372675335453445e-05, |
|
"loss": 1.0481, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.733728287049907e-05, |
|
"loss": 1.0567, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7301690185675734e-05, |
|
"loss": 1.0519, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7265898252237075e-05, |
|
"loss": 1.0468, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.722990804687384e-05, |
|
"loss": 1.0498, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7193720551687205e-05, |
|
"loss": 1.0638, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7157336754162003e-05, |
|
"loss": 1.06, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.712075764713977e-05, |
|
"loss": 1.0448, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.708398422879165e-05, |
|
"loss": 1.069, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7047017502591147e-05, |
|
"loss": 1.0614, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7009858477286762e-05, |
|
"loss": 1.0555, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6972508166874455e-05, |
|
"loss": 1.0565, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6934967590569974e-05, |
|
"loss": 1.0523, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.6897237772781046e-05, |
|
"loss": 1.0827, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1386452913284302, |
|
"eval_runtime": 270.1529, |
|
"eval_samples_per_second": 14.832, |
|
"eval_steps_per_second": 0.233, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.6859319743079418e-05, |
|
"loss": 1.0631, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.6821214536172776e-05, |
|
"loss": 1.0932, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.678292319187649e-05, |
|
"loss": 1.0894, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.6744446755085262e-05, |
|
"loss": 1.0334, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.6705786275744583e-05, |
|
"loss": 1.0676, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.6666942808822108e-05, |
|
"loss": 1.0437, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.6627917414278868e-05, |
|
"loss": 1.0324, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.658871115704032e-05, |
|
"loss": 1.0557, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.6549325106967325e-05, |
|
"loss": 1.0512, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.650976033882691e-05, |
|
"loss": 1.0486, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6470017932262984e-05, |
|
"loss": 1.0355, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6430098971766838e-05, |
|
"loss": 1.0232, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6390004546647585e-05, |
|
"loss": 1.0065, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.6349735751002406e-05, |
|
"loss": 1.0284, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.6309293683686717e-05, |
|
"loss": 1.0459, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.6268679448284166e-05, |
|
"loss": 1.0362, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.6227894153076527e-05, |
|
"loss": 1.0292, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.6186938911013462e-05, |
|
"loss": 1.0333, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.614581483968214e-05, |
|
"loss": 1.0149, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.610452306127675e-05, |
|
"loss": 1.0859, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.606306470256786e-05, |
|
"loss": 1.0579, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.60214408948717e-05, |
|
"loss": 1.0825, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.5979652774019267e-05, |
|
"loss": 1.0193, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.593770148032534e-05, |
|
"loss": 1.0099, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.5895588158557356e-05, |
|
"loss": 1.0205, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.5853313957904178e-05, |
|
"loss": 1.0181, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.5810880031944743e-05, |
|
"loss": 1.0367, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.5768287538616553e-05, |
|
"loss": 1.0345, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.5725537640184122e-05, |
|
"loss": 1.0709, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.568263150320722e-05, |
|
"loss": 1.0312, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.5639570298509067e-05, |
|
"loss": 1.0226, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.5596355201144367e-05, |
|
"loss": 1.0455, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.5552987390367246e-05, |
|
"loss": 1.0069, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.550946804959909e-05, |
|
"loss": 1.0713, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.5465798366396215e-05, |
|
"loss": 1.036, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.5421979532417502e-05, |
|
"loss": 1.0206, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.5378012743391856e-05, |
|
"loss": 1.0739, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.5333899199085577e-05, |
|
"loss": 1.013, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.5289640103269626e-05, |
|
"loss": 1.0229, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.524523666368678e-05, |
|
"loss": 1.0486, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.520069009201866e-05, |
|
"loss": 1.0147, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.5156001603852693e-05, |
|
"loss": 1.0681, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.511117241864891e-05, |
|
"loss": 1.0338, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.5066203759706696e-05, |
|
"loss": 1.0418, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.5021096854131387e-05, |
|
"loss": 1.0372, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.4975852932800805e-05, |
|
"loss": 1.0394, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.4930473230331652e-05, |
|
"loss": 1.0371, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.4884958985045835e-05, |
|
"loss": 1.0233, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.4839311438936653e-05, |
|
"loss": 1.0207, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.4793531837634935e-05, |
|
"loss": 1.054, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4747621430375032e-05, |
|
"loss": 1.0572, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4701581469960718e-05, |
|
"loss": 1.0538, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4655413212731025e-05, |
|
"loss": 1.0613, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.4609117918525945e-05, |
|
"loss": 1.0333, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.4562696850652068e-05, |
|
"loss": 1.0556, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.4516151275848078e-05, |
|
"loss": 1.0721, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.4469482464250222e-05, |
|
"loss": 0.9882, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.4422691689357632e-05, |
|
"loss": 1.0409, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.4375780227997565e-05, |
|
"loss": 1.0527, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4328749360290587e-05, |
|
"loss": 1.0248, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.428160036961561e-05, |
|
"loss": 1.0534, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4234334542574906e-05, |
|
"loss": 0.9941, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.418695316895896e-05, |
|
"loss": 1.053, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4139457541711303e-05, |
|
"loss": 1.0324, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4091848956893223e-05, |
|
"loss": 1.0279, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4044128713648392e-05, |
|
"loss": 1.0278, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3996298114167422e-05, |
|
"loss": 1.0633, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3948358463652317e-05, |
|
"loss": 1.0154, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3900311070280878e-05, |
|
"loss": 1.0135, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3852157245170991e-05, |
|
"loss": 1.034, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3803898302344854e-05, |
|
"loss": 1.0241, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3755535558693111e-05, |
|
"loss": 1.0702, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3707070333938939e-05, |
|
"loss": 1.038, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3658503950602007e-05, |
|
"loss": 1.0253, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3609837733962405e-05, |
|
"loss": 1.045, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3561073012024477e-05, |
|
"loss": 1.0279, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3512211115480578e-05, |
|
"loss": 1.0166, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3463253377674774e-05, |
|
"loss": 1.0184, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3414201134566434e-05, |
|
"loss": 1.0514, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3365055724693795e-05, |
|
"loss": 1.0346, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3315818489137442e-05, |
|
"loss": 1.0531, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3266490771483675e-05, |
|
"loss": 1.0278, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3217073917787897e-05, |
|
"loss": 1.0558, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3167569276537839e-05, |
|
"loss": 1.0481, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3117978198616787e-05, |
|
"loss": 1.0687, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.306830203726671e-05, |
|
"loss": 1.0741, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.3018542148051336e-05, |
|
"loss": 1.0073, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.2968699888819158e-05, |
|
"loss": 1.0441, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2918776619666385e-05, |
|
"loss": 1.004, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2868773702899828e-05, |
|
"loss": 0.9965, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2818692502999717e-05, |
|
"loss": 1.041, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.2768534386582476e-05, |
|
"loss": 1.0049, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.2718300722363431e-05, |
|
"loss": 1.0336, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2667992881119452e-05, |
|
"loss": 1.0232, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2617612235651554e-05, |
|
"loss": 0.9965, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.2567160160747442e-05, |
|
"loss": 1.0322, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.251663803314398e-05, |
|
"loss": 1.0388, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.2466047231489626e-05, |
|
"loss": 0.9872, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.2415389136306835e-05, |
|
"loss": 1.0115, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2364665129954347e-05, |
|
"loss": 1.0314, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2313876596589505e-05, |
|
"loss": 1.0515, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2263024922130455e-05, |
|
"loss": 1.0165, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.2212111494218339e-05, |
|
"loss": 1.0365, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.2161137702179435e-05, |
|
"loss": 1.0008, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.2110104936987228e-05, |
|
"loss": 1.0264, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.205901459122447e-05, |
|
"loss": 1.0287, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.2007868059045169e-05, |
|
"loss": 0.9973, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.1956666736136557e-05, |
|
"loss": 0.9963, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1905412019680982e-05, |
|
"loss": 1.0434, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1854105308317805e-05, |
|
"loss": 1.0148, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1802748002105229e-05, |
|
"loss": 1.0251, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.1751341502482071e-05, |
|
"loss": 1.0049, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.1699887212229564e-05, |
|
"loss": 1.0155, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.1648386535433035e-05, |
|
"loss": 0.9966, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.1596840877443609e-05, |
|
"loss": 1.0214, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.1545251644839866e-05, |
|
"loss": 1.0431, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.149362024538944e-05, |
|
"loss": 1.0451, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.1441948088010627e-05, |
|
"loss": 1.0332, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.1390236582733917e-05, |
|
"loss": 1.033, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1338487140663532e-05, |
|
"loss": 1.0366, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1286701173938906e-05, |
|
"loss": 1.0033, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1234880095696161e-05, |
|
"loss": 1.0273, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.1183025320029545e-05, |
|
"loss": 1.0492, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.1131138261952845e-05, |
|
"loss": 1.0282, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.1079220337360762e-05, |
|
"loss": 1.0362, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.1027272962990286e-05, |
|
"loss": 1.0302, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.0975297556382041e-05, |
|
"loss": 1.0027, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.0923295535841581e-05, |
|
"loss": 1.0089, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.087126832040071e-05, |
|
"loss": 1.0065, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.0819217329778752e-05, |
|
"loss": 1.0203, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.0767143984343808e-05, |
|
"loss": 1.0367, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0715049705073992e-05, |
|
"loss": 1.0242, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0662935913518662e-05, |
|
"loss": 0.9876, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.0610804031759634e-05, |
|
"loss": 0.9922, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.0558655482372358e-05, |
|
"loss": 0.9913, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.0506491688387128e-05, |
|
"loss": 0.9943, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.0454314073250216e-05, |
|
"loss": 1.0171, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.0402124060785055e-05, |
|
"loss": 1.0161, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.034992307515337e-05, |
|
"loss": 0.9986, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.0297712540816326e-05, |
|
"loss": 1.0172, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0245493882495657e-05, |
|
"loss": 1.0079, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0193268525134776e-05, |
|
"loss": 1.0202, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.0141037893859908e-05, |
|
"loss": 1.007, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.0088803413941183e-05, |
|
"loss": 1.0105, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.0036566510753764e-05, |
|
"loss": 0.9913, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.984328609738927e-06, |
|
"loss": 1.0487, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.932091136365193e-06, |
|
"loss": 0.995, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.879855516089396e-06, |
|
"loss": 1.055, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.827623174317815e-06, |
|
"loss": 1.0318, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.775395536367265e-06, |
|
"loss": 0.995, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.723174027426193e-06, |
|
"loss": 1.0236, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.670960072515809e-06, |
|
"loss": 1.0893, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.618755096451178e-06, |
|
"loss": 1.0244, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.566560523802359e-06, |
|
"loss": 1.0099, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.514377778855521e-06, |
|
"loss": 1.0482, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.46220828557407e-06, |
|
"loss": 1.0085, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.410053467559815e-06, |
|
"loss": 1.0098, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.357914748014092e-06, |
|
"loss": 1.0058, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.305793549698954e-06, |
|
"loss": 1.008, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.253691294898322e-06, |
|
"loss": 0.9987, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.201609405379196e-06, |
|
"loss": 0.9889, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.14954930235284e-06, |
|
"loss": 1.0021, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.09751240643602e-06, |
|
"loss": 1.0085, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.045500137612213e-06, |
|
"loss": 0.9855, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.993513915192875e-06, |
|
"loss": 1.0406, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.941555157778717e-06, |
|
"loss": 0.9954, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.889625283220964e-06, |
|
"loss": 1.0048, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.837725708582712e-06, |
|
"loss": 0.9826, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.785857850100212e-06, |
|
"loss": 1.0078, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.734023123144253e-06, |
|
"loss": 1.0104, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.682222942181523e-06, |
|
"loss": 0.9944, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.630458720736028e-06, |
|
"loss": 1.0247, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.57873187135051e-06, |
|
"loss": 1.0013, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.527043805547888e-06, |
|
"loss": 1.0306, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.47539593379277e-06, |
|
"loss": 1.0086, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.423789665452941e-06, |
|
"loss": 0.9972, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.372226408760911e-06, |
|
"loss": 1.0358, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.320707570775492e-06, |
|
"loss": 0.9843, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.269234557343388e-06, |
|
"loss": 1.0006, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.217808773060858e-06, |
|
"loss": 1.0084, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.166431621235356e-06, |
|
"loss": 1.0065, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.115104503847266e-06, |
|
"loss": 0.9779, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.063828821511618e-06, |
|
"loss": 0.9894, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.012605973439891e-06, |
|
"loss": 0.9662, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.961437357401818e-06, |
|
"loss": 0.9949, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.91032436968725e-06, |
|
"loss": 0.9958, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.859268405068052e-06, |
|
"loss": 0.9732, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.808270856760037e-06, |
|
"loss": 1.0233, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.757333116384961e-06, |
|
"loss": 0.983, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.70645657393253e-06, |
|
"loss": 0.9974, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.655642617722489e-06, |
|
"loss": 0.9945, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.604892634366727e-06, |
|
"loss": 1.0259, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.554208008731442e-06, |
|
"loss": 0.9975, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.503590123899343e-06, |
|
"loss": 0.9827, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.4530403611319235e-06, |
|
"loss": 1.0423, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.402560099831754e-06, |
|
"loss": 1.0176, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.352150717504852e-06, |
|
"loss": 1.0119, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.301813589723093e-06, |
|
"loss": 0.9805, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.251550090086657e-06, |
|
"loss": 1.0459, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.201361590186573e-06, |
|
"loss": 1.0295, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.151249459567264e-06, |
|
"loss": 0.9875, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.101215065689192e-06, |
|
"loss": 0.9925, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.051259773891543e-06, |
|
"loss": 1.0416, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.001384947354959e-06, |
|
"loss": 1.0123, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.95159194706434e-06, |
|
"loss": 0.9807, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.901882131771721e-06, |
|
"loss": 1.0175, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.852256857959178e-06, |
|
"loss": 0.9718, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.802717479801815e-06, |
|
"loss": 1.0055, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.753265349130822e-06, |
|
"loss": 1.0187, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.70390181539657e-06, |
|
"loss": 1.0281, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.654628225631801e-06, |
|
"loss": 1.0635, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.605445924414867e-06, |
|
"loss": 0.9939, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.55635625383303e-06, |
|
"loss": 0.9986, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.507360553445849e-06, |
|
"loss": 0.9905, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.458460160248627e-06, |
|
"loss": 1.0186, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.40965640863592e-06, |
|
"loss": 0.9853, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.360950630365126e-06, |
|
"loss": 1.0336, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.312344154520147e-06, |
|
"loss": 1.0262, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.263838307475115e-06, |
|
"loss": 0.9972, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.21543441285821e-06, |
|
"loss": 0.993, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.167133791515527e-06, |
|
"loss": 1.0201, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.118937761475038e-06, |
|
"loss": 1.0386, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.070847637910632e-06, |
|
"loss": 1.0138, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.0825759172439575, |
|
"eval_runtime": 269.9437, |
|
"eval_samples_per_second": 14.844, |
|
"eval_steps_per_second": 0.233, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.022864733106213e-06, |
|
"loss": 0.9617, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.974990356419902e-06, |
|
"loss": 0.9457, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.927225814248303e-06, |
|
"loss": 0.9495, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.8795724099908614e-06, |
|
"loss": 0.9459, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.832031444014274e-06, |
|
"loss": 0.9169, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.784604213617038e-06, |
|
"loss": 0.8853, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.7372920129940155e-06, |
|
"loss": 0.9089, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.6900961332011485e-06, |
|
"loss": 0.888, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.643017862120208e-06, |
|
"loss": 0.9162, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.5960584844236565e-06, |
|
"loss": 0.8906, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.549219281539601e-06, |
|
"loss": 0.9347, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.502501531616799e-06, |
|
"loss": 0.9007, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.455906509489817e-06, |
|
"loss": 0.9515, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.409435486644208e-06, |
|
"loss": 0.9317, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.363089731181852e-06, |
|
"loss": 0.8994, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.316870507786305e-06, |
|
"loss": 0.9224, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.270779077688332e-06, |
|
"loss": 0.9042, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.224816698631468e-06, |
|
"loss": 0.9196, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.1789846248376995e-06, |
|
"loss": 0.8837, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.133284106973251e-06, |
|
"loss": 0.916, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.087716392114428e-06, |
|
"loss": 0.9439, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.042282723713623e-06, |
|
"loss": 0.9258, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.996984341565361e-06, |
|
"loss": 0.9729, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.951822481772473e-06, |
|
"loss": 0.9285, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.9067983767123736e-06, |
|
"loss": 0.9289, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.861913255003405e-06, |
|
"loss": 0.8967, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.817168341471357e-06, |
|
"loss": 0.9569, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.772564857116002e-06, |
|
"loss": 0.9102, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.728104019077784e-06, |
|
"loss": 0.9083, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.683787040604626e-06, |
|
"loss": 0.9257, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.6396151310188045e-06, |
|
"loss": 0.9114, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.595589495683952e-06, |
|
"loss": 0.9103, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.551711335972172e-06, |
|
"loss": 0.9248, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.507981849231251e-06, |
|
"loss": 0.9305, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.464402228751976e-06, |
|
"loss": 0.9022, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.420973663735593e-06, |
|
"loss": 0.9529, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.377697339261341e-06, |
|
"loss": 0.9035, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.334574436254115e-06, |
|
"loss": 0.8879, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.291606131452247e-06, |
|
"loss": 0.9072, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.248793597375387e-06, |
|
"loss": 0.8995, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.206138002292513e-06, |
|
"loss": 0.8912, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.163640510190056e-06, |
|
"loss": 0.9425, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.121302280740115e-06, |
|
"loss": 0.8785, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.0791244692688425e-06, |
|
"loss": 0.9219, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.037108226724898e-06, |
|
"loss": 0.9573, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.995254699648047e-06, |
|
"loss": 0.9308, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.953565030137876e-06, |
|
"loss": 0.9299, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.912040355822611e-06, |
|
"loss": 0.9243, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.870681809828103e-06, |
|
"loss": 0.9235, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.8294905207468805e-06, |
|
"loss": 0.8999, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.788467612607378e-06, |
|
"loss": 0.9218, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.7476142048432283e-06, |
|
"loss": 0.9287, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.706931412262749e-06, |
|
"loss": 0.905, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6664203450185045e-06, |
|
"loss": 0.9664, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.626082108577017e-06, |
|
"loss": 0.9128, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.585917803688603e-06, |
|
"loss": 0.9228, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.5459285263573227e-06, |
|
"loss": 0.9113, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.5061153678110905e-06, |
|
"loss": 0.9084, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.466479414471887e-06, |
|
"loss": 0.8885, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.4270217479261147e-06, |
|
"loss": 0.9083, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.387743444895084e-06, |
|
"loss": 0.9217, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.3486455772056304e-06, |
|
"loss": 0.9464, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.3097292117608694e-06, |
|
"loss": 0.9189, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.2709954105110787e-06, |
|
"loss": 0.9169, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2324452304247266e-06, |
|
"loss": 0.9018, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.1940797234596156e-06, |
|
"loss": 0.8962, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.1558999365341927e-06, |
|
"loss": 0.8915, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.1179069114989736e-06, |
|
"loss": 0.9441, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0801016851081124e-06, |
|
"loss": 0.9303, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0424852889911138e-06, |
|
"loss": 0.9336, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.0050587496246707e-06, |
|
"loss": 0.9121, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.967823088304671e-06, |
|
"loss": 0.9122, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9307793211183168e-06, |
|
"loss": 0.9178, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8939284589164096e-06, |
|
"loss": 0.907, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.857271507285739e-06, |
|
"loss": 0.8861, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.8208094665216757e-06, |
|
"loss": 0.9148, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.784543331600854e-06, |
|
"loss": 0.9317, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7484740921540288e-06, |
|
"loss": 0.8886, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.7126027324390712e-06, |
|
"loss": 0.9177, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6769302313140966e-06, |
|
"loss": 0.8915, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6414575622107787e-06, |
|
"loss": 0.92, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.6061856931077644e-06, |
|
"loss": 0.8871, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5711155865042703e-06, |
|
"loss": 0.8972, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.536248199393817e-06, |
|
"loss": 0.9284, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.501584483238102e-06, |
|
"loss": 0.8995, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.467125383941066e-06, |
|
"loss": 0.9184, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.432871841823047e-06, |
|
"loss": 0.9183, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.3988247915951445e-06, |
|
"loss": 0.9288, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.364985162333697e-06, |
|
"loss": 0.8974, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3313538774549428e-06, |
|
"loss": 0.914, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2979318546898145e-06, |
|
"loss": 0.9057, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.264720006058898e-06, |
|
"loss": 0.9529, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.231719237847546e-06, |
|
"loss": 0.9345, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.198930450581139e-06, |
|
"loss": 0.9018, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.166354539000528e-06, |
|
"loss": 0.8782, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.1339923920376017e-06, |
|
"loss": 0.9086, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.1018448927910497e-06, |
|
"loss": 0.9109, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0699129185022324e-06, |
|
"loss": 0.9217, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0381973405312806e-06, |
|
"loss": 0.9157, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.0066990243332885e-06, |
|
"loss": 0.8888, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.975418829434713e-06, |
|
"loss": 0.963, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.944357609409917e-06, |
|
"loss": 0.8953, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9135162118578644e-06, |
|
"loss": 0.9401, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8828954783790088e-06, |
|
"loss": 0.9246, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8524962445523176e-06, |
|
"loss": 0.9401, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.822319339912474e-06, |
|
"loss": 0.8813, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7923655879272395e-06, |
|
"loss": 0.8964, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7626358059749748e-06, |
|
"loss": 0.9069, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.7331308053223594e-06, |
|
"loss": 0.9092, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.703851391102226e-06, |
|
"loss": 0.9131, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6747983622916053e-06, |
|
"loss": 0.9142, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6459725116899184e-06, |
|
"loss": 0.9299, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6173746258973443e-06, |
|
"loss": 0.8959, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5890054852933602e-06, |
|
"loss": 0.9397, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5608658640154361e-06, |
|
"loss": 0.9297, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5329565299379223e-06, |
|
"loss": 0.8527, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5052782446510772e-06, |
|
"loss": 0.9119, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4778317634403082e-06, |
|
"loss": 0.9469, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4506178352655475e-06, |
|
"loss": 0.873, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.423637202740813e-06, |
|
"loss": 0.9326, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3968906021139517e-06, |
|
"loss": 0.9292, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.3703787632465459e-06, |
|
"loss": 0.8877, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3441024095939913e-06, |
|
"loss": 0.9064, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.318062258185765e-06, |
|
"loss": 0.8927, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2922590196058539e-06, |
|
"loss": 0.903, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2666933979733575e-06, |
|
"loss": 0.911, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2413660909232893e-06, |
|
"loss": 0.9241, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.21627778958753e-06, |
|
"loss": 0.9215, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1914291785759658e-06, |
|
"loss": 0.9333, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1668209359578153e-06, |
|
"loss": 0.9004, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.1424537332431119e-06, |
|
"loss": 0.8789, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.118328235364401e-06, |
|
"loss": 0.9247, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.0944451006585766e-06, |
|
"loss": 0.9213, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0708049808489263e-06, |
|
"loss": 0.9284, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0474085210273378e-06, |
|
"loss": 0.8989, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0242563596367094e-06, |
|
"loss": 0.9243, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.001349128453516e-06, |
|
"loss": 0.8905, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.78687452570577e-07, |
|
"loss": 0.9108, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.562719503799967e-07, |
|
"loss": 0.8905, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.341032335562805e-07, |
|
"loss": 0.9233, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.121819070396598e-07, |
|
"loss": 0.9552, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.905085690195725e-07, |
|
"loss": 0.8892, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.690838109183463e-07, |
|
"loss": 0.9288, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.47908217375053e-07, |
|
"loss": 0.9249, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.269823662295596e-07, |
|
"loss": 0.9242, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.063068285067632e-07, |
|
"loss": 0.8873, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.858821684009987e-07, |
|
"loss": 0.9275, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.657089432606568e-07, |
|
"loss": 0.9259, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.457877035729588e-07, |
|
"loss": 0.9342, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.261189929489509e-07, |
|
"loss": 0.9211, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.067033481086605e-07, |
|
"loss": 0.9178, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.875412988664532e-07, |
|
"loss": 0.8959, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.686333681165735e-07, |
|
"loss": 0.9205, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.499800718188776e-07, |
|
"loss": 0.9074, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.315819189847517e-07, |
|
"loss": 0.918, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.134394116632314e-07, |
|
"loss": 0.889, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.955530449272861e-07, |
|
"loss": 0.9108, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.779233068603196e-07, |
|
"loss": 0.9774, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.605506785428505e-07, |
|
"loss": 0.9115, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.434356340393832e-07, |
|
"loss": 0.8917, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.265786403854711e-07, |
|
"loss": 0.8878, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.099801575749752e-07, |
|
"loss": 0.9124, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.936406385475024e-07, |
|
"loss": 0.9115, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.775605291760609e-07, |
|
"loss": 0.922, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.6174026825487997e-07, |
|
"loss": 0.9283, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.4618028748744213e-07, |
|
"loss": 0.8847, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.3088101147470306e-07, |
|
"loss": 0.9202, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.158428577035012e-07, |
|
"loss": 0.9252, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.0106623653516916e-07, |
|
"loss": 0.8904, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.865515511943363e-07, |
|
"loss": 0.9257, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.7229919775792155e-07, |
|
"loss": 0.9058, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.58309565144328e-07, |
|
"loss": 0.8647, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.4458303510282787e-07, |
|
"loss": 0.9449, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.311199822031508e-07, |
|
"loss": 0.8925, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.1792077382525544e-07, |
|
"loss": 0.895, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.04985770149312e-07, |
|
"loss": 0.9363, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.9231532414586314e-07, |
|
"loss": 0.9493, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.799097815662011e-07, |
|
"loss": 0.9185, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.677694809329334e-07, |
|
"loss": 0.9278, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.5589475353073987e-07, |
|
"loss": 0.9356, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.4428592339733157e-07, |
|
"loss": 0.885, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.329433073146148e-07, |
|
"loss": 0.9391, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.2186721480004114e-07, |
|
"loss": 0.9084, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.1105794809816782e-07, |
|
"loss": 0.9021, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.0051580217240185e-07, |
|
"loss": 0.9618, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.9024106469695547e-07, |
|
"loss": 0.9278, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.802340160489968e-07, |
|
"loss": 0.922, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.704949293009972e-07, |
|
"loss": 0.8844, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.610240702132826e-07, |
|
"loss": 0.9047, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5182169722677497e-07, |
|
"loss": 0.8983, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.4288806145594692e-07, |
|
"loss": 0.9385, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.3422340668196588e-07, |
|
"loss": 0.9109, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2582796934604292e-07, |
|
"loss": 0.9284, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1770197854297893e-07, |
|
"loss": 0.877, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0984565601491415e-07, |
|
"loss": 0.9205, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.0225921614527956e-07, |
|
"loss": 0.9207, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.494286595294389e-08, |
|
"loss": 0.9282, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.789680508656473e-08, |
|
"loss": 0.922, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.112122581913962e-08, |
|
"loss": 0.9086, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.461631304276462e-08, |
|
"loss": 0.9127, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.838224426358065e-08, |
|
"loss": 0.9173, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.241918959693838e-08, |
|
"loss": 0.9251, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.672731176274981e-08, |
|
"loss": 0.8894, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.1306766081048456e-08, |
|
"loss": 0.8684, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.615770046775603e-08, |
|
"loss": 0.8916, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.128025543064018e-08, |
|
"loss": 0.9037, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.66745640654842e-08, |
|
"loss": 0.9356, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.234075205245213e-08, |
|
"loss": 0.9158, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.8278937652662653e-08, |
|
"loss": 0.9056, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.4489231704958317e-08, |
|
"loss": 0.8952, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.0971737622883515e-08, |
|
"loss": 0.9064, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.77265513918623e-08, |
|
"loss": 0.8911, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4753761566578261e-08, |
|
"loss": 0.9326, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.205344926855867e-08, |
|
"loss": 0.9235, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.625688183958481e-09, |
|
"loss": 0.9424, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.47054456155416e-09, |
|
"loss": 0.8567, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.588077210932907e-09, |
|
"loss": 0.8862, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.978337500887275e-09, |
|
"loss": 0.9048, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.641369358016288e-09, |
|
"loss": 0.9303, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.5772092655219618e-09, |
|
"loss": 0.9105, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.858862622189823e-10, |
|
"loss": 0.9435, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.674219417408974e-10, |
|
"loss": 0.9219, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 2.183045194725786e-11, |
|
"loss": 0.928, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.0884039402008057, |
|
"eval_runtime": 162.651, |
|
"eval_samples_per_second": 24.636, |
|
"eval_steps_per_second": 0.387, |
|
"step": 3342 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 8.245006253484668e-06, |
|
"loss": 0.8916, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.214166787806253e-06, |
|
"loss": 0.8846, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 8.183344856284602e-06, |
|
"loss": 0.9028, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.152540761544041e-06, |
|
"loss": 0.9094, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 8.121754806033773e-06, |
|
"loss": 0.8841, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.0909872920249e-06, |
|
"loss": 0.8714, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.060238521607455e-06, |
|
"loss": 0.8917, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 8.029508796687432e-06, |
|
"loss": 0.874, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.998798418983841e-06, |
|
"loss": 0.8712, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 7.968107690025716e-06, |
|
"loss": 0.8787, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 7.937436911149184e-06, |
|
"loss": 0.8686, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 7.906786383494479e-06, |
|
"loss": 0.8882, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 7.87615640800301e-06, |
|
"loss": 0.9286, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 7.845547285414399e-06, |
|
"loss": 0.8802, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 7.814959316263507e-06, |
|
"loss": 0.8919, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 7.78439280087751e-06, |
|
"loss": 0.8981, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 7.75384803937295e-06, |
|
"loss": 0.9084, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 7.723325331652765e-06, |
|
"loss": 0.9348, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 7.692824977403362e-06, |
|
"loss": 0.8967, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 7.662347276091678e-06, |
|
"loss": 0.9366, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 7.63189252696222e-06, |
|
"loss": 0.889, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 7.601461029034155e-06, |
|
"loss": 0.8943, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 7.571053081098346e-06, |
|
"loss": 0.9559, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 7.540668981714434e-06, |
|
"loss": 0.9159, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 7.510309029207914e-06, |
|
"loss": 0.8611, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 7.4799735216671786e-06, |
|
"loss": 0.8762, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 7.449662756940617e-06, |
|
"loss": 0.8756, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 7.419377032633689e-06, |
|
"loss": 0.9262, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 7.389116646105977e-06, |
|
"loss": 0.8761, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 7.358881894468304e-06, |
|
"loss": 0.8921, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 7.328673074579788e-06, |
|
"loss": 0.9248, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 7.298490483044935e-06, |
|
"loss": 0.8906, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 7.268334416210737e-06, |
|
"loss": 0.9038, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 7.238205170163744e-06, |
|
"loss": 0.9138, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 7.2081030407271715e-06, |
|
"loss": 0.9013, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 7.1780283234579976e-06, |
|
"loss": 0.9174, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 7.147981313644041e-06, |
|
"loss": 0.8708, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 7.117962306301085e-06, |
|
"loss": 0.9359, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 7.0879715961699746e-06, |
|
"loss": 0.8862, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 7.058009477713705e-06, |
|
"loss": 0.9141, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 7.028076245114565e-06, |
|
"loss": 0.9017, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.998172192271209e-06, |
|
"loss": 0.8628, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 6.968297612795798e-06, |
|
"loss": 0.9472, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.938452800011119e-06, |
|
"loss": 0.8825, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.908638046947679e-06, |
|
"loss": 0.8804, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 6.878853646340853e-06, |
|
"loss": 0.9433, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.8490998906280085e-06, |
|
"loss": 0.9129, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.819377071945608e-06, |
|
"loss": 0.8972, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 6.789685482126378e-06, |
|
"loss": 0.914, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 6.760025412696419e-06, |
|
"loss": 0.9269, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 6.730397154872341e-06, |
|
"loss": 0.8669, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 6.700800999558428e-06, |
|
"loss": 0.875, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 6.6712372373437514e-06, |
|
"loss": 0.907, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 6.6417061584993394e-06, |
|
"loss": 0.8929, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 6.612208052975326e-06, |
|
"loss": 0.8968, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 6.58274321039808e-06, |
|
"loss": 0.9157, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 6.553311920067393e-06, |
|
"loss": 0.8825, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 6.523914470953625e-06, |
|
"loss": 0.8943, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 6.494551151694854e-06, |
|
"loss": 0.8973, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 6.465222250594071e-06, |
|
"loss": 0.8858, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 6.435928055616324e-06, |
|
"loss": 0.905, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 6.406668854385895e-06, |
|
"loss": 0.926, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 6.3774449341834985e-06, |
|
"loss": 0.9108, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 6.348256581943419e-06, |
|
"loss": 0.8875, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 6.319104084250742e-06, |
|
"loss": 0.8755, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 6.289987727338502e-06, |
|
"loss": 0.9258, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 6.260907797084883e-06, |
|
"loss": 0.8721, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 6.231864579010427e-06, |
|
"loss": 0.8946, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 6.202858358275215e-06, |
|
"loss": 0.894, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 6.173889419676057e-06, |
|
"loss": 0.8819, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 6.144958047643732e-06, |
|
"loss": 0.887, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 6.116064526240151e-06, |
|
"loss": 0.8457, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 6.087209139155607e-06, |
|
"loss": 0.8673, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 6.058392169705962e-06, |
|
"loss": 0.886, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 6.029613900829876e-06, |
|
"loss": 0.8601, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 6.000874615086034e-06, |
|
"loss": 0.8498, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 5.972174594650363e-06, |
|
"loss": 0.8934, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.943514121313257e-06, |
|
"loss": 0.906, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 5.914893476476834e-06, |
|
"loss": 0.9065, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.886312941152146e-06, |
|
"loss": 0.872, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 5.857772795956427e-06, |
|
"loss": 0.8978, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.829273321110356e-06, |
|
"loss": 0.8698, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 5.800814796435275e-06, |
|
"loss": 0.8934, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.772397501350466e-06, |
|
"loss": 0.911, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.7440217148704095e-06, |
|
"loss": 0.8939, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 5.715687715602011e-06, |
|
"loss": 0.8998, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 5.687395781741905e-06, |
|
"loss": 0.8859, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 5.659146191073715e-06, |
|
"loss": 0.8924, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 5.6309392209652924e-06, |
|
"loss": 0.8709, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 5.602775148366053e-06, |
|
"loss": 0.899, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 5.574654249804204e-06, |
|
"loss": 0.905, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 5.546576801384045e-06, |
|
"loss": 0.9167, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 5.518543078783283e-06, |
|
"loss": 0.8507, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 5.490553357250283e-06, |
|
"loss": 0.8897, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 5.4626079116013904e-06, |
|
"loss": 0.8695, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 5.434707016218251e-06, |
|
"loss": 0.8604, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 5.4068509450450555e-06, |
|
"loss": 0.8791, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 5.379039971585929e-06, |
|
"loss": 0.8768, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 5.351274368902184e-06, |
|
"loss": 0.9302, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 5.323554409609657e-06, |
|
"loss": 0.8816, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 5.2958803658760584e-06, |
|
"loss": 0.8981, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 5.26825250941825e-06, |
|
"loss": 0.9107, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 5.2406711114996245e-06, |
|
"loss": 0.8965, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 5.213136442927425e-06, |
|
"loss": 0.9214, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 5.185648774050065e-06, |
|
"loss": 0.9129, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 5.158208374754503e-06, |
|
"loss": 0.825, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 5.130815514463595e-06, |
|
"loss": 0.9019, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 5.103470462133411e-06, |
|
"loss": 0.9306, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 5.076173486250642e-06, |
|
"loss": 0.9126, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 5.048924854829934e-06, |
|
"loss": 0.9109, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 5.021724835411252e-06, |
|
"loss": 0.8636, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.994573695057292e-06, |
|
"loss": 0.8564, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.967471700350804e-06, |
|
"loss": 0.9006, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.940419117392017e-06, |
|
"loss": 0.8914, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.913416211796019e-06, |
|
"loss": 0.898, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.886463248690122e-06, |
|
"loss": 0.8824, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.8595604927113e-06, |
|
"loss": 0.887, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.832708208003558e-06, |
|
"loss": 0.8436, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.805906658215345e-06, |
|
"loss": 0.9058, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.779156106496986e-06, |
|
"loss": 0.8642, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.752456815498075e-06, |
|
"loss": 0.8989, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.725809047364894e-06, |
|
"loss": 0.9035, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.699213063737873e-06, |
|
"loss": 0.848, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.672669125748967e-06, |
|
"loss": 0.8869, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.646177494019148e-06, |
|
"loss": 0.8993, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.619738428655814e-06, |
|
"loss": 0.8863, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.593352189250223e-06, |
|
"loss": 0.9149, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.567019034874992e-06, |
|
"loss": 0.8988, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.540739224081507e-06, |
|
"loss": 0.8707, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.514513014897395e-06, |
|
"loss": 0.8805, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.488340664824019e-06, |
|
"loss": 0.8947, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.462222430833901e-06, |
|
"loss": 0.8609, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.436158569368251e-06, |
|
"loss": 0.9086, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.410149336334412e-06, |
|
"loss": 0.9107, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.384194987103347e-06, |
|
"loss": 0.9069, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.358295776507169e-06, |
|
"loss": 0.8991, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.3324519588366e-06, |
|
"loss": 0.9011, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.3066637878384786e-06, |
|
"loss": 0.888, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.2809315167132995e-06, |
|
"loss": 0.8755, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.25525539811269e-06, |
|
"loss": 0.8731, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.22963568413695e-06, |
|
"loss": 0.8573, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.204072626332578e-06, |
|
"loss": 0.92, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.178566475689777e-06, |
|
"loss": 0.8779, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.153117482640032e-06, |
|
"loss": 0.8831, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.127725897053619e-06, |
|
"loss": 0.8932, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.1023919682371425e-06, |
|
"loss": 0.8764, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.077115944931137e-06, |
|
"loss": 0.8703, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.051898075307573e-06, |
|
"loss": 0.8988, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.026738606967438e-06, |
|
"loss": 0.882, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.001637786938322e-06, |
|
"loss": 0.88, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.976595861671971e-06, |
|
"loss": 0.879, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.951613077041873e-06, |
|
"loss": 0.8769, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.9266896783408495e-06, |
|
"loss": 0.9086, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.90182591027863e-06, |
|
"loss": 0.8886, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.877022016979481e-06, |
|
"loss": 0.8914, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.852278241979782e-06, |
|
"loss": 0.8912, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.8275948282256325e-06, |
|
"loss": 0.8714, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.8029720180704954e-06, |
|
"loss": 0.8594, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.7784100532727853e-06, |
|
"loss": 0.9012, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.753909174993513e-06, |
|
"loss": 0.8871, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.7294696237939156e-06, |
|
"loss": 0.8889, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.705091639633078e-06, |
|
"loss": 0.856, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.680775461865611e-06, |
|
"loss": 0.9155, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.656521329239271e-06, |
|
"loss": 0.8735, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.6323294798926167e-06, |
|
"loss": 0.8754, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.608200151352699e-06, |
|
"loss": 0.8701, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.584133580532696e-06, |
|
"loss": 0.8998, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.560130003729605e-06, |
|
"loss": 0.9001, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.536189656621919e-06, |
|
"loss": 0.9005, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.512312774267309e-06, |
|
"loss": 0.9006, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.488499591100324e-06, |
|
"loss": 0.8993, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.464750340930082e-06, |
|
"loss": 0.8992, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.4410652569379667e-06, |
|
"loss": 0.8436, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.417444571675367e-06, |
|
"loss": 0.8307, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.39388851706136e-06, |
|
"loss": 0.9374, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.3703973243804533e-06, |
|
"loss": 0.8965, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.34697122428031e-06, |
|
"loss": 0.8821, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.3236104467694805e-06, |
|
"loss": 0.8909, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.300315221215149e-06, |
|
"loss": 0.8928, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.2770857763408816e-06, |
|
"loss": 0.8993, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.2539223402243735e-06, |
|
"loss": 0.8958, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.2308251402952183e-06, |
|
"loss": 0.8612, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.2077944033326702e-06, |
|
"loss": 0.8681, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.18483035546342e-06, |
|
"loss": 0.8814, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.161933222159371e-06, |
|
"loss": 0.9353, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.139103228235428e-06, |
|
"loss": 0.89, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.116340597847293e-06, |
|
"loss": 0.8637, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.093645554489254e-06, |
|
"loss": 0.8833, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.071018320992004e-06, |
|
"loss": 0.8744, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.0484591195204447e-06, |
|
"loss": 0.8983, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.0259681715715094e-06, |
|
"loss": 0.8347, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.003545697971972e-06, |
|
"loss": 0.8931, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 2.981191918876317e-06, |
|
"loss": 0.8751, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.9589070537645347e-06, |
|
"loss": 0.8511, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 2.936691321439993e-06, |
|
"loss": 0.8524, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.914544940027277e-06, |
|
"loss": 0.8768, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 2.892468126970058e-06, |
|
"loss": 0.9014, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.8704610990289473e-06, |
|
"loss": 0.8941, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 2.848524072279372e-06, |
|
"loss": 0.8533, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.826657262109459e-06, |
|
"loss": 0.913, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.804860883217911e-06, |
|
"loss": 0.8897, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 2.7831351496119076e-06, |
|
"loss": 0.8815, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.761480274604994e-06, |
|
"loss": 0.9072, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 2.7398964708149977e-06, |
|
"loss": 0.8978, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.7183839501619304e-06, |
|
"loss": 0.8789, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 2.696942923865915e-06, |
|
"loss": 0.8813, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.6755736024451095e-06, |
|
"loss": 0.859, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 2.6542761957136374e-06, |
|
"loss": 0.8847, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.63305091277953e-06, |
|
"loss": 0.8953, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 2.6118979620426743e-06, |
|
"loss": 0.8731, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.590817551192765e-06, |
|
"loss": 0.8978, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.569809887207265e-06, |
|
"loss": 0.892, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 2.5488751763493757e-06, |
|
"loss": 0.8848, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.528013624166007e-06, |
|
"loss": 0.9003, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 2.507225435485766e-06, |
|
"loss": 0.8844, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.4865108144169404e-06, |
|
"loss": 0.8861, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 2.4658699643454953e-06, |
|
"loss": 0.896, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.4453030879330788e-06, |
|
"loss": 0.8928, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 2.4248103871150298e-06, |
|
"loss": 0.9331, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.404392063098395e-06, |
|
"loss": 0.8947, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.384048316359958e-06, |
|
"loss": 0.8848, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 2.363779346644264e-06, |
|
"loss": 0.8967, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.3435853529616616e-06, |
|
"loss": 0.8938, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 1.0775328874588013, |
|
"eval_runtime": 165.7744, |
|
"eval_samples_per_second": 24.171, |
|
"eval_steps_per_second": 0.507, |
|
"step": 4456 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 2.3234665335863525e-06, |
|
"loss": 0.862, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.3034230860544383e-06, |
|
"loss": 0.8098, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.2834552071619854e-06, |
|
"loss": 0.8289, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.26356309296309e-06, |
|
"loss": 0.864, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.243746938767959e-06, |
|
"loss": 0.8438, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.2240069391409824e-06, |
|
"loss": 0.7762, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.2043432878988314e-06, |
|
"loss": 0.8177, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.1847561781085535e-06, |
|
"loss": 0.7772, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.1652458020856735e-06, |
|
"loss": 0.8474, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.145812351392309e-06, |
|
"loss": 0.8485, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.126456016835289e-06, |
|
"loss": 0.8396, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.107176988464279e-06, |
|
"loss": 0.811, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.087975455569915e-06, |
|
"loss": 0.7833, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.0688516066819464e-06, |
|
"loss": 0.8312, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.049805629567384e-06, |
|
"loss": 0.8378, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.030837711228657e-06, |
|
"loss": 0.8215, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.0119480379017765e-06, |
|
"loss": 0.8479, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.9931367950545068e-06, |
|
"loss": 0.8621, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 1.974404167384545e-06, |
|
"loss": 0.8427, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.9557503388177057e-06, |
|
"loss": 0.8022, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.937175492506118e-06, |
|
"loss": 0.8118, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.918679810826427e-06, |
|
"loss": 0.7988, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.900263475377997e-06, |
|
"loss": 0.8303, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.8819266669811397e-06, |
|
"loss": 0.8205, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 1.8636695656753278e-06, |
|
"loss": 0.8022, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.845492350717435e-06, |
|
"loss": 0.8677, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.827395200579971e-06, |
|
"loss": 0.8406, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.8093782929493332e-06, |
|
"loss": 0.8164, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7914418047240556e-06, |
|
"loss": 0.8669, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 1.7735859120130826e-06, |
|
"loss": 0.8369, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7558107901340294e-06, |
|
"loss": 0.838, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.7381166136114646e-06, |
|
"loss": 0.8319, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.7205035561752014e-06, |
|
"loss": 0.8419, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.702971790758582e-06, |
|
"loss": 0.7837, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.6855214894967886e-06, |
|
"loss": 0.8214, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 1.6681528237251454e-06, |
|
"loss": 0.8362, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6508659639774504e-06, |
|
"loss": 0.8347, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6336610799842788e-06, |
|
"loss": 0.831, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6165383406713376e-06, |
|
"loss": 0.8763, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.5994979141577937e-06, |
|
"loss": 0.8536, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 1.5825399677546305e-06, |
|
"loss": 0.8596, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.5656646679630027e-06, |
|
"loss": 0.7901, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 1.5488721804726003e-06, |
|
"loss": 0.8195, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.5321626701600213e-06, |
|
"loss": 0.7907, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.5155363010871627e-06, |
|
"loss": 0.8539, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.4989932364995874e-06, |
|
"loss": 0.8054, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.4825336388249455e-06, |
|
"loss": 0.8162, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 1.466157669671372e-06, |
|
"loss": 0.8393, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.4498654898258857e-06, |
|
"loss": 0.8421, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.433657259252833e-06, |
|
"loss": 0.8299, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.4175331370922995e-06, |
|
"loss": 0.8525, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 1.4014932816585602e-06, |
|
"loss": 0.817, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.3855378504385164e-06, |
|
"loss": 0.8278, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 1.3696670000901513e-06, |
|
"loss": 0.8666, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.3538808864409948e-06, |
|
"loss": 0.8466, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.3381796644865964e-06, |
|
"loss": 0.8353, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.3225634883889882e-06, |
|
"loss": 0.8093, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.307032511475188e-06, |
|
"loss": 0.8146, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 1.2915868862356896e-06, |
|
"loss": 0.859, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.2762267643229597e-06, |
|
"loss": 0.8337, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.2609522965499555e-06, |
|
"loss": 0.8354, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.245763632888638e-06, |
|
"loss": 0.8514, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 1.2306609224685029e-06, |
|
"loss": 0.8396, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.2156443135751262e-06, |
|
"loss": 0.8408, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 1.2007139536486823e-06, |
|
"loss": 0.838, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.1858699892825209e-06, |
|
"loss": 0.8165, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.1711125662217248e-06, |
|
"loss": 0.8046, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.156441829361661e-06, |
|
"loss": 0.8506, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.1418579227465777e-06, |
|
"loss": 0.8341, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.1273609895681815e-06, |
|
"loss": 0.8449, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.1129511721642294e-06, |
|
"loss": 0.8144, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.0986286120171452e-06, |
|
"loss": 0.8219, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 1.0843934497526042e-06, |
|
"loss": 0.8334, |
|
"step": 4820 |
|
}, |
|
{ |
      "epoch": 4.33,
      "learning_rate": 1.0702458251381765e-06,
      "loss": 0.8281,
      "step": 4825
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.05618587708195e-06,
      "loss": 0.8388,
      "step": 4830
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.042213743631153e-06,
      "loss": 0.8378,
      "step": 4835
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.02832956197081e-06,
      "loss": 0.8276,
      "step": 4840
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.014533468422405e-06,
      "loss": 0.8324,
      "step": 4845
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.000825598442514e-06,
      "loss": 0.7807,
      "step": 4850
    },
    {
      "epoch": 4.36,
      "learning_rate": 9.872060866215071e-07,
      "loss": 0.8854,
      "step": 4855
    },
    {
      "epoch": 4.36,
      "learning_rate": 9.736750666822004e-07,
      "loss": 0.7875,
      "step": 4860
    },
    {
      "epoch": 4.37,
      "learning_rate": 9.602326714785592e-07,
      "loss": 0.8137,
      "step": 4865
    },
    {
      "epoch": 4.37,
      "learning_rate": 9.468790329943956e-07,
      "loss": 0.8352,
      "step": 4870
    },
    {
      "epoch": 4.38,
      "learning_rate": 9.336142823420558e-07,
      "loss": 0.8216,
      "step": 4875
    },
    {
      "epoch": 4.38,
      "learning_rate": 9.204385497611468e-07,
      "loss": 0.8317,
      "step": 4880
    },
    {
      "epoch": 4.39,
      "learning_rate": 9.073519646172613e-07,
      "loss": 0.8239,
      "step": 4885
    },
    {
      "epoch": 4.39,
      "learning_rate": 8.943546554006855e-07,
      "loss": 0.8441,
      "step": 4890
    },
    {
      "epoch": 4.39,
      "learning_rate": 8.814467497251678e-07,
      "loss": 0.8267,
      "step": 4895
    },
    {
      "epoch": 4.4,
      "learning_rate": 8.686283743266378e-07,
      "loss": 0.8074,
      "step": 4900
    },
    {
      "epoch": 4.4,
      "learning_rate": 8.558996550619802e-07,
      "loss": 0.8387,
      "step": 4905
    },
    {
      "epoch": 4.41,
      "learning_rate": 8.432607169077978e-07,
      "loss": 0.8371,
      "step": 4910
    },
    {
      "epoch": 4.41,
      "learning_rate": 8.3071168395917e-07,
      "loss": 0.792,
      "step": 4915
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.182526794284485e-07,
      "loss": 0.8194,
      "step": 4920
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.058838256440493e-07,
      "loss": 0.8323,
      "step": 4925
    },
    {
      "epoch": 4.43,
      "learning_rate": 7.936052440492348e-07,
      "loss": 0.8843,
      "step": 4930
    },
    {
      "epoch": 4.43,
      "learning_rate": 7.814170552009381e-07,
      "loss": 0.8377,
      "step": 4935
    },
    {
      "epoch": 4.43,
      "learning_rate": 7.693193787685782e-07,
      "loss": 0.8585,
      "step": 4940
    },
    {
      "epoch": 4.44,
      "learning_rate": 7.573123335328692e-07,
      "loss": 0.8503,
      "step": 4945
    },
    {
      "epoch": 4.44,
      "learning_rate": 7.45396037384677e-07,
      "loss": 0.8537,
      "step": 4950
    },
    {
      "epoch": 4.45,
      "learning_rate": 7.335706073238391e-07,
      "loss": 0.8476,
      "step": 4955
    },
    {
      "epoch": 4.45,
      "learning_rate": 7.218361594580325e-07,
      "loss": 0.8318,
      "step": 4960
    },
    {
      "epoch": 4.46,
      "learning_rate": 7.101928090016319e-07,
      "loss": 0.82,
      "step": 4965
    },
    {
      "epoch": 4.46,
      "learning_rate": 6.98640670274564e-07,
      "loss": 0.8347,
      "step": 4970
    },
    {
      "epoch": 4.47,
      "learning_rate": 6.871798567012034e-07,
      "loss": 0.8196,
      "step": 4975
    },
    {
      "epoch": 4.47,
      "learning_rate": 6.758104808092514e-07,
      "loss": 0.8426,
      "step": 4980
    },
    {
      "epoch": 4.47,
      "learning_rate": 6.645326542286224e-07,
      "loss": 0.8168,
      "step": 4985
    },
    {
      "epoch": 4.48,
      "learning_rate": 6.533464876903683e-07,
      "loss": 0.8371,
      "step": 4990
    },
    {
      "epoch": 4.48,
      "learning_rate": 6.422520910255714e-07,
      "loss": 0.8151,
      "step": 4995
    },
    {
      "epoch": 4.49,
      "learning_rate": 6.312495731642732e-07,
      "loss": 0.8499,
      "step": 5000
    },
    {
      "epoch": 4.49,
      "learning_rate": 6.203390421344113e-07,
      "loss": 0.8308,
      "step": 5005
    },
    {
      "epoch": 4.5,
      "learning_rate": 6.095206050607483e-07,
      "loss": 0.8247,
      "step": 5010
    },
    {
      "epoch": 4.5,
      "learning_rate": 5.98794368163823e-07,
      "loss": 0.8559,
      "step": 5015
    },
    {
      "epoch": 4.51,
      "learning_rate": 5.881604367589188e-07,
      "loss": 0.8474,
      "step": 5020
    },
    {
      "epoch": 4.51,
      "learning_rate": 5.776189152550083e-07,
      "loss": 0.8243,
      "step": 5025
    },
    {
      "epoch": 4.52,
      "learning_rate": 5.671699071537473e-07,
      "loss": 0.829,
      "step": 5030
    },
    {
      "epoch": 4.52,
      "learning_rate": 5.5681351504845e-07,
      "loss": 0.8279,
      "step": 5035
    },
    {
      "epoch": 4.52,
      "learning_rate": 5.465498406230774e-07,
      "loss": 0.8062,
      "step": 5040
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.363789846512546e-07,
      "loss": 0.8327,
      "step": 5045
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.263010469952623e-07,
      "loss": 0.8103,
      "step": 5050
    },
    {
      "epoch": 4.54,
      "learning_rate": 5.163161266050664e-07,
      "loss": 0.8473,
      "step": 5055
    },
    {
      "epoch": 4.54,
      "learning_rate": 5.064243215173525e-07,
      "loss": 0.843,
      "step": 5060
    },
    {
      "epoch": 4.55,
      "learning_rate": 4.966257288545451e-07,
      "loss": 0.8536,
      "step": 5065
    },
    {
      "epoch": 4.55,
      "learning_rate": 4.869204448238729e-07,
      "loss": 0.7981,
      "step": 5070
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.773085647164155e-07,
      "loss": 0.8115,
      "step": 5075
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.677901829061615e-07,
      "loss": 0.8428,
      "step": 5080
    },
    {
      "epoch": 4.56,
      "learning_rate": 4.583653928490994e-07,
      "loss": 0.814,
      "step": 5085
    },
    {
      "epoch": 4.57,
      "learning_rate": 4.490342870822828e-07,
      "loss": 0.856,
      "step": 5090
    },
    {
      "epoch": 4.57,
      "learning_rate": 4.397969572229277e-07,
      "loss": 0.8272,
      "step": 5095
    },
    {
      "epoch": 4.58,
      "learning_rate": 4.3065349396751997e-07,
      "loss": 0.8357,
      "step": 5100
    },
    {
      "epoch": 4.58,
      "learning_rate": 4.216039870909094e-07,
      "loss": 0.8058,
      "step": 5105
    },
    {
      "epoch": 4.59,
      "learning_rate": 4.126485254454493e-07,
      "loss": 0.8338,
      "step": 5110
    },
    {
      "epoch": 4.59,
      "learning_rate": 4.0378719696010373e-07,
      "loss": 0.8632,
      "step": 5115
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.950200886395916e-07,
      "loss": 0.833,
      "step": 5120
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.8634728656354094e-07,
      "loss": 0.8156,
      "step": 5125
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.7776887588563237e-07,
      "loss": 0.8148,
      "step": 5130
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.692849408327637e-07,
      "loss": 0.8747,
      "step": 5135
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.608955647042345e-07,
      "loss": 0.852,
      "step": 5140
    },
    {
      "epoch": 4.62,
      "learning_rate": 3.526008298709127e-07,
      "loss": 0.8285,
      "step": 5145
    },
    {
      "epoch": 4.62,
      "learning_rate": 3.44400817774434e-07,
      "loss": 0.8357,
      "step": 5150
    },
    {
      "epoch": 4.63,
      "learning_rate": 3.362956089264069e-07,
      "loss": 0.81,
      "step": 5155
    },
    {
      "epoch": 4.63,
      "learning_rate": 3.2828528290760885e-07,
      "loss": 0.8322,
      "step": 5160
    },
    {
      "epoch": 4.64,
      "learning_rate": 3.203699183672193e-07,
      "loss": 0.8424,
      "step": 5165
    },
    {
      "epoch": 4.64,
      "learning_rate": 3.1254959302203904e-07,
      "loss": 0.8368,
      "step": 5170
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.0482438365572517e-07,
      "loss": 0.8664,
      "step": 5175
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.971943661180465e-07,
      "loss": 0.8288,
      "step": 5180
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.8965961532413155e-07,
      "loss": 0.861,
      "step": 5185
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.822202052537304e-07,
      "loss": 0.8271,
      "step": 5190
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.7487620895050193e-07,
      "loss": 0.8762,
      "step": 5195
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.6762769852127666e-07,
      "loss": 0.8659,
      "step": 5200
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.6047474513536953e-07,
      "loss": 0.8202,
      "step": 5205
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.5341741902386585e-07,
      "loss": 0.8481,
      "step": 5210
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.464557894789343e-07,
      "loss": 0.8283,
      "step": 5215
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.3958992485315523e-07,
      "loss": 0.8172,
      "step": 5220
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.328198925588432e-07,
      "loss": 0.803,
      "step": 5225
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.261457590673799e-07,
      "loss": 0.8493,
      "step": 5230
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.1956758990857253e-07,
      "loss": 0.8473,
      "step": 5235
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.13085449670003e-07,
      "loss": 0.8326,
      "step": 5240
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.0669940199639526e-07,
      "loss": 0.857,
      "step": 5245
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.0040950958899463e-07,
      "loss": 0.8794,
      "step": 5250
    },
    {
      "epoch": 4.72,
      "learning_rate": 1.942158342049405e-07,
      "loss": 0.8361,
      "step": 5255
    },
    {
      "epoch": 4.72,
      "learning_rate": 1.8811843665667683e-07,
      "loss": 0.8777,
      "step": 5260
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.8211737681134155e-07,
      "loss": 0.8682,
      "step": 5265
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.7621271359018144e-07,
      "loss": 0.8455,
      "step": 5270
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.7040450496798144e-07,
      "loss": 0.8415,
      "step": 5275
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.6469280797248522e-07,
      "loss": 0.8413,
      "step": 5280
    },
    {
      "epoch": 4.74,
      "learning_rate": 1.5907767868383994e-07,
      "loss": 0.8099,
      "step": 5285
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.5355917223404682e-07,
      "loss": 0.801,
      "step": 5290
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.48137342806417e-07,
      "loss": 0.8407,
      "step": 5295
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.4281224363504099e-07,
      "loss": 0.7909,
      "step": 5300
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.375839270042656e-07,
      "loss": 0.8323,
      "step": 5305
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.3245244424818115e-07,
      "loss": 0.8212,
      "step": 5310
    },
    {
      "epoch": 4.77,
      "learning_rate": 1.274178457501174e-07,
      "loss": 0.8201,
      "step": 5315
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.2248018094214941e-07,
      "loss": 0.8028,
      "step": 5320
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.1763949830460807e-07,
      "loss": 0.799,
      "step": 5325
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.1289584536561149e-07,
      "loss": 0.8216,
      "step": 5330
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.0824926870058983e-07,
      "loss": 0.8352,
      "step": 5335
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.036998139318368e-07,
      "loss": 0.8685,
      "step": 5340
    },
    {
      "epoch": 4.8,
      "learning_rate": 9.924752572805563e-08,
      "loss": 0.8498,
      "step": 5345
    },
    {
      "epoch": 4.8,
      "learning_rate": 9.489244780391927e-08,
      "loss": 0.8421,
      "step": 5350
    },
    {
      "epoch": 4.81,
      "learning_rate": 9.063462291964753e-08,
      "loss": 0.8048,
      "step": 5355
    },
    {
      "epoch": 4.81,
      "learning_rate": 8.647409288058405e-08,
      "loss": 0.8427,
      "step": 5360
    },
    {
      "epoch": 4.82,
      "learning_rate": 8.241089853678442e-08,
      "loss": 0.8407,
      "step": 5365
    },
    {
      "epoch": 4.82,
      "learning_rate": 7.844507978261418e-08,
      "loss": 0.8291,
      "step": 5370
    },
    {
      "epoch": 4.82,
      "learning_rate": 7.457667555636372e-08,
      "loss": 0.8161,
      "step": 5375
    },
    {
      "epoch": 4.83,
      "learning_rate": 7.08057238398574e-08,
      "loss": 0.8289,
      "step": 5380
    },
    {
      "epoch": 4.83,
      "learning_rate": 6.713226165808606e-08,
      "loss": 0.8805,
      "step": 5385
    },
    {
      "epoch": 4.84,
      "learning_rate": 6.35563250788429e-08,
      "loss": 0.8414,
      "step": 5390
    },
    {
      "epoch": 4.84,
      "learning_rate": 6.00779492123671e-08,
      "loss": 0.8315,
      "step": 5395
    },
    {
      "epoch": 4.85,
      "learning_rate": 5.6697168211002904e-08,
      "loss": 0.865,
      "step": 5400
    },
    {
      "epoch": 4.85,
      "learning_rate": 5.341401526885781e-08,
      "loss": 0.8091,
      "step": 5405
    },
    {
      "epoch": 4.86,
      "learning_rate": 5.022852262148492e-08,
      "loss": 0.8233,
      "step": 5410
    },
    {
      "epoch": 4.86,
      "learning_rate": 4.714072154556104e-08,
      "loss": 0.8586,
      "step": 5415
    },
    {
      "epoch": 4.87,
      "learning_rate": 4.415064235857913e-08,
      "loss": 0.8414,
      "step": 5420
    },
    {
      "epoch": 4.87,
      "learning_rate": 4.1258314418554103e-08,
      "loss": 0.8488,
      "step": 5425
    },
    {
      "epoch": 4.87,
      "learning_rate": 3.8463766123733035e-08,
      "loss": 0.8216,
      "step": 5430
    },
    {
      "epoch": 4.88,
      "learning_rate": 3.576702491231432e-08,
      "loss": 0.8235,
      "step": 5435
    },
    {
      "epoch": 4.88,
      "learning_rate": 3.3168117262181166e-08,
      "loss": 0.8525,
      "step": 5440
    },
    {
      "epoch": 4.89,
      "learning_rate": 3.0667068690640736e-08,
      "loss": 0.8211,
      "step": 5445
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.8263903754174316e-08,
      "loss": 0.7973,
      "step": 5450
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.5958646048191983e-08,
      "loss": 0.82,
      "step": 5455
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.3751318206808316e-08,
      "loss": 0.8062,
      "step": 5460
    },
    {
      "epoch": 4.91,
      "learning_rate": 2.1641941902611484e-08,
      "loss": 0.8365,
      "step": 5465
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.9630537846461184e-08,
      "loss": 0.8252,
      "step": 5470
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.7717125787272138e-08,
      "loss": 0.8588,
      "step": 5475
    },
    {
      "epoch": 4.92,
      "learning_rate": 1.590172451183314e-08,
      "loss": 0.8541,
      "step": 5480
    },
    {
      "epoch": 4.92,
      "learning_rate": 1.4184351844612753e-08,
      "loss": 0.8257,
      "step": 5485
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.2565024647590574e-08,
      "loss": 0.8112,
      "step": 5490
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.1043758820088457e-08,
      "loss": 0.8213,
      "step": 5495
    },
    {
      "epoch": 4.94,
      "learning_rate": 9.620569298615102e-09,
      "loss": 0.8088,
      "step": 5500
    },
    {
      "epoch": 4.94,
      "learning_rate": 8.29547005671838e-09,
      "loss": 0.785,
      "step": 5505
    },
    {
      "epoch": 4.95,
      "learning_rate": 7.068474104852119e-09,
      "loss": 0.85,
      "step": 5510
    },
    {
      "epoch": 4.95,
      "learning_rate": 5.939593490243978e-09,
      "loss": 0.7999,
      "step": 5515
    },
    {
      "epoch": 4.96,
      "learning_rate": 4.908839296777768e-09,
      "loss": 0.8472,
      "step": 5520
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.976221644886868e-09,
      "loss": 0.8288,
      "step": 5525
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.14174969145431e-09,
      "loss": 0.8157,
      "step": 5530
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.4054316297195122e-09,
      "loss": 0.7948,
      "step": 5535
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.7672746892039016e-09,
      "loss": 0.8029,
      "step": 5540
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.2272851356343042e-09,
      "loss": 0.8308,
      "step": 5545
    },
    {
      "epoch": 4.98,
      "learning_rate": 7.854682708841044e-10,
      "loss": 0.8331,
      "step": 5550
    },
    {
      "epoch": 4.99,
      "learning_rate": 4.4182843291884493e-10,
      "loss": 0.8337,
      "step": 5555
    },
    {
      "epoch": 4.99,
      "learning_rate": 1.9636899575958823e-10,
      "loss": 0.8564,
      "step": 5560
    },
    {
      "epoch": 5.0,
      "learning_rate": 4.909236944294904e-11,
      "loss": 0.8432,
      "step": 5565
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.0,
      "loss": 0.8523,
      "step": 5570
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.0952651500701904,
      "eval_runtime": 163.428,
      "eval_samples_per_second": 24.518,
      "eval_steps_per_second": 0.514,
      "step": 5570
    },
    {
      "epoch": 5.0,
      "step": 5570,
      "total_flos": 1167473579655168.0,
      "train_loss": 0.34447823324152127,
      "train_runtime": 12106.1256,
      "train_samples_per_second": 14.719,
      "train_steps_per_second": 0.46
    }
  ],
  "logging_steps": 5,
  "max_steps": 5570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1167473579655168.0,
  "train_batch_size": 6,
  "trial_name": null,
  "trial_params": null
}