{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 2702,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 1.4760147601476015e-06, |
|
"loss": 2.6485, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 7.380073800738008e-06, |
|
"loss": 2.6486, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.4760147601476015e-05, |
|
"loss": 2.6449, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 2.2140221402214025e-05, |
|
"loss": 2.6688, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 2.952029520295203e-05, |
|
"loss": 2.6268, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 3.690036900369004e-05, |
|
"loss": 2.6019, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.25, |
|
"learning_rate": 4.428044280442805e-05, |
|
"loss": 2.5532, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2578125, |
|
"learning_rate": 5.166051660516605e-05, |
|
"loss": 2.508, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.19140625, |
|
"learning_rate": 5.904059040590406e-05, |
|
"loss": 2.4591, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 6.642066420664207e-05, |
|
"loss": 2.4235, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1787109375, |
|
"learning_rate": 7.380073800738008e-05, |
|
"loss": 2.3654, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1337890625, |
|
"learning_rate": 8.118081180811809e-05, |
|
"loss": 2.3604, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.142578125, |
|
"learning_rate": 8.85608856088561e-05, |
|
"loss": 2.3221, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.11767578125, |
|
"learning_rate": 9.59409594095941e-05, |
|
"loss": 2.3041, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.0001033210332103321, |
|
"loss": 2.3051, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.00011070110701107013, |
|
"loss": 2.2795, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00011808118081180812, |
|
"loss": 2.2926, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00012546125461254613, |
|
"loss": 2.2594, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00013284132841328414, |
|
"loss": 2.2639, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.00014022140221402215, |
|
"loss": 2.2496, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00014760147601476016, |
|
"loss": 2.2428, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.00015498154981549817, |
|
"loss": 2.2572, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00016236162361623618, |
|
"loss": 2.2447, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.0001697416974169742, |
|
"loss": 2.2258, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.0001771217712177122, |
|
"loss": 2.2284, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.0001845018450184502, |
|
"loss": 2.2346, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.0001918819188191882, |
|
"loss": 2.2365, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.00019926199261992622, |
|
"loss": 2.2068, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.0002066420664206642, |
|
"loss": 2.2316, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 0.00021402214022140222, |
|
"loss": 2.2427, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00022140221402214025, |
|
"loss": 2.2405, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.115234375, |
|
"learning_rate": 0.00022878228782287826, |
|
"loss": 2.2197, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 0.00023616236162361624, |
|
"loss": 2.2411, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1181640625, |
|
"learning_rate": 0.00024354243542435425, |
|
"loss": 2.2195, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 0.00025092250922509226, |
|
"loss": 2.2147, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.00025830258302583027, |
|
"loss": 2.2341, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002656826568265683, |
|
"loss": 2.2116, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002730627306273063, |
|
"loss": 2.1973, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 0.0002804428044280443, |
|
"loss": 2.2137, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0002878228782287823, |
|
"loss": 2.2388, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.0002952029520295203, |
|
"loss": 2.2111, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00030258302583025833, |
|
"loss": 2.2361, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 0.00030996309963099634, |
|
"loss": 2.1882, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.00031734317343173435, |
|
"loss": 2.2399, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 0.00032472324723247236, |
|
"loss": 2.2062, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 0.0003321033210332103, |
|
"loss": 2.2068, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.10498046875, |
|
"learning_rate": 0.0003394833948339484, |
|
"loss": 2.2058, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.0003468634686346864, |
|
"loss": 2.2137, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0003542435424354244, |
|
"loss": 2.2127, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.09716796875, |
|
"learning_rate": 0.00036162361623616235, |
|
"loss": 2.2093, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0003690036900369004, |
|
"loss": 2.2063, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003763837638376384, |
|
"loss": 2.2347, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0003837638376383764, |
|
"loss": 2.1907, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003911439114391144, |
|
"loss": 2.2137, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00039852398523985245, |
|
"loss": 2.1975, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.00039999732792377247, |
|
"loss": 2.203, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00039998647273646793, |
|
"loss": 2.2062, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00039996726788618994, |
|
"loss": 2.2064, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.00039993971417476293, |
|
"loss": 2.1835, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003999038127525856, |
|
"loss": 2.1989, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.09716796875, |
|
"learning_rate": 0.00039985956511858335, |
|
"loss": 2.2137, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.00039980697312014523, |
|
"loss": 2.1926, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.00039974603895304704, |
|
"loss": 2.1829, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00039967676516135974, |
|
"loss": 2.1946, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00039959915463734285, |
|
"loss": 2.1814, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00039951321062132425, |
|
"loss": 2.187, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00039941893670156453, |
|
"loss": 2.2039, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 0.0003993163368141071, |
|
"loss": 2.1839, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.0003992054152426141, |
|
"loss": 2.2013, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.0003990861766181874, |
|
"loss": 2.1924, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.0003989586259191755, |
|
"loss": 2.1955, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.0003988227684709653, |
|
"loss": 2.1965, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.00039867860994575994, |
|
"loss": 2.1946, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.000398526156362342, |
|
"loss": 2.1955, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003983654140858221, |
|
"loss": 2.1897, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00039819638982737353, |
|
"loss": 2.189, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 0.0003980190906439514, |
|
"loss": 2.1812, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00039783352393799856, |
|
"loss": 2.188, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.0927734375, |
|
"learning_rate": 0.00039763969745713635, |
|
"loss": 2.1859, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.0003974376192938411, |
|
"loss": 2.1697, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003972272978851061, |
|
"loss": 2.2024, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 0.00039700874201208976, |
|
"loss": 2.1922, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.00039678196079974865, |
|
"loss": 2.1752, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 0.00039654696371645663, |
|
"loss": 2.1947, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.0003963037605736096, |
|
"loss": 2.194, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 0.0003960523615252156, |
|
"loss": 2.2065, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.00039579277706747125, |
|
"loss": 2.1707, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.0947265625, |
|
"learning_rate": 0.00039552501803832336, |
|
"loss": 2.1889, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.09521484375, |
|
"learning_rate": 0.0003952490956170161, |
|
"loss": 2.1876, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 0.00039496502132362494, |
|
"loss": 2.1877, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.000394672807018575, |
|
"loss": 2.177, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003943724649021464, |
|
"loss": 2.2009, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 0.00039406400751396445, |
|
"loss": 2.1802, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0003937474477324764, |
|
"loss": 2.1789, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.09521484375, |
|
"learning_rate": 0.00039342279877441357, |
|
"loss": 2.1829, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00039309007419423964, |
|
"loss": 2.1871, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.09716796875, |
|
"learning_rate": 0.00039274928788358477, |
|
"loss": 2.2183, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.00039240045407066556, |
|
"loss": 2.1888, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 0.00039204358731969083, |
|
"loss": 2.1962, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003916787025302538, |
|
"loss": 2.1746, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.0003913058149367101, |
|
"loss": 2.1784, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.0003909249401075413, |
|
"loss": 2.1903, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.09423828125, |
|
"learning_rate": 0.0003905360939447052, |
|
"loss": 2.1772, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00039013929268297195, |
|
"loss": 2.2023, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00038973455288924614, |
|
"loss": 2.1887, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003893218914618749, |
|
"loss": 2.1764, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00038890132562994286, |
|
"loss": 2.1911, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003884728729525525, |
|
"loss": 2.2008, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.0003880365513180908, |
|
"loss": 2.1832, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 0.00038759237894348306, |
|
"loss": 2.1775, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003871403743734316, |
|
"loss": 2.1768, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.0003866805564796421, |
|
"loss": 2.1825, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.09521484375, |
|
"learning_rate": 0.000386212944460035, |
|
"loss": 2.191, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.0003857375578379449, |
|
"loss": 2.168, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003852544164613043, |
|
"loss": 2.1844, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003847635405018162, |
|
"loss": 2.1926, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00038426495045411064, |
|
"loss": 2.1935, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.0003837586671348901, |
|
"loss": 2.1654, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00038324471168205945, |
|
"loss": 2.1816, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.0003827231055538443, |
|
"loss": 2.1697, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.0003821938705278944, |
|
"loss": 2.1581, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.00038165702870037485, |
|
"loss": 2.1588, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.0003811126024850432, |
|
"loss": 2.1842, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.0003805606146123139, |
|
"loss": 2.169, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003800010881283093, |
|
"loss": 2.17, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003794340463938972, |
|
"loss": 2.1613, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.0003788595130837157, |
|
"loss": 2.1806, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00037827751218518494, |
|
"loss": 2.1841, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.000377688067997505, |
|
"loss": 2.1827, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.000377091205130642, |
|
"loss": 2.1846, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003764869485043003, |
|
"loss": 2.1877, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003758753233468823, |
|
"loss": 2.1702, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 0.00037525635519443466, |
|
"loss": 2.1755, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00037463006988958266, |
|
"loss": 2.1691, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003739964935804509, |
|
"loss": 2.1713, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.095703125, |
|
"learning_rate": 0.0003733556527195719, |
|
"loss": 2.1749, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00037270757406278126, |
|
"loss": 2.1685, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00037205228466810094, |
|
"loss": 2.1818, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.00037138981189460945, |
|
"loss": 2.1661, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.00037072018340129936, |
|
"loss": 2.1733, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003700434271459229, |
|
"loss": 2.1749, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003693595713838243, |
|
"loss": 2.1902, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.0003686686446667605, |
|
"loss": 2.1807, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.0003679706758417087, |
|
"loss": 2.1834, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.0003672656940496621, |
|
"loss": 2.1786, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.0003665537287244134, |
|
"loss": 2.1742, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00036583480959132567, |
|
"loss": 2.1676, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003651089666660914, |
|
"loss": 2.1633, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.0003643762302534792, |
|
"loss": 2.1699, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003636366309460688, |
|
"loss": 2.1772, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00036289019962297347, |
|
"loss": 2.1602, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.000362136967448551, |
|
"loss": 2.1592, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00036137696587110234, |
|
"loss": 2.1497, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003606102266215589, |
|
"loss": 2.1816, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003598367817121574, |
|
"loss": 2.1681, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 0.0003590566634351036, |
|
"loss": 2.179, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.000358269904361224, |
|
"loss": 2.186, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00035747653733860576, |
|
"loss": 2.1635, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003566765954912256, |
|
"loss": 2.1863, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.0003558701122175666, |
|
"loss": 2.1639, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0003550571211892238, |
|
"loss": 2.1795, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.00035423765634949844, |
|
"loss": 2.1634, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0003534117519119807, |
|
"loss": 2.1673, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003525794423591214, |
|
"loss": 2.1722, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00035174076244079216, |
|
"loss": 2.1687, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.10498046875, |
|
"learning_rate": 0.00035089574717283466, |
|
"loss": 2.1736, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003500444318355986, |
|
"loss": 2.1784, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003491868519724688, |
|
"loss": 2.1783, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.000348323043388381, |
|
"loss": 2.1674, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.00034745304214832726, |
|
"loss": 2.1791, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.0003465768845758502, |
|
"loss": 2.1573, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00034569460725152615, |
|
"loss": 2.1842, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 0.00034480624701143807, |
|
"loss": 2.1624, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00034391184094563764, |
|
"loss": 2.1892, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.00034301142639659663, |
|
"loss": 2.1788, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003421050409576478, |
|
"loss": 2.174, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00034119272247141536, |
|
"loss": 2.171, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0003402745090282351, |
|
"loss": 2.1571, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.10498046875, |
|
"learning_rate": 0.00033935043896456384, |
|
"loss": 2.1716, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.000338420550861379, |
|
"loss": 2.1631, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.0003374848835425679, |
|
"loss": 2.1652, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00033654347607330656, |
|
"loss": 2.1661, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0003355963677584288, |
|
"loss": 2.1697, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00033464359814078536, |
|
"loss": 2.1937, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0003336852069995927, |
|
"loss": 2.1646, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003327212343487725, |
|
"loss": 2.1635, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.0003317517204352804, |
|
"loss": 2.149, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0003307767057374266, |
|
"loss": 2.1678, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.00032979623096318487, |
|
"loss": 2.1643, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00032881033704849357, |
|
"loss": 2.1748, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00032781906515554646, |
|
"loss": 2.1617, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0003268224566710738, |
|
"loss": 2.1529, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00032582055320461465, |
|
"loss": 2.1781, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 0.0003248133965867798, |
|
"loss": 2.1429, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.00032380102886750493, |
|
"loss": 2.1629, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.000322783492314295, |
|
"loss": 2.1655, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0003217608294104601, |
|
"loss": 2.1758, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 0.00032073308285334085, |
|
"loss": 2.1565, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.0003197002955525264, |
|
"loss": 2.1806, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 0.00031866251062806267, |
|
"loss": 2.1701, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00031761977140865207, |
|
"loss": 2.1675, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.0003165721214298444, |
|
"loss": 2.1671, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0003155196044322193, |
|
"loss": 2.1575, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.00031446226435956, |
|
"loss": 2.1381, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0003134001453570186, |
|
"loss": 2.1626, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00031233329176927295, |
|
"loss": 2.1648, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0003112617481386752, |
|
"loss": 2.1671, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0003101855592033922, |
|
"loss": 2.1648, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.0003091047698955375, |
|
"loss": 2.1746, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.00030801942533929545, |
|
"loss": 2.1603, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00030692957084903726, |
|
"loss": 2.172, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00030583525192742897, |
|
"loss": 2.1643, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 0.00030473651426353167, |
|
"loss": 2.1756, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00030363340373089413, |
|
"loss": 2.1637, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.00030252596638563714, |
|
"loss": 2.1718, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.000301414248464531, |
|
"loss": 2.1552, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.000300298296383065, |
|
"loss": 2.187, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00029917815673350935, |
|
"loss": 2.1747, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0002980538762829698, |
|
"loss": 2.1677, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00029692550197143563, |
|
"loss": 2.1476, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.00029579308090981913, |
|
"loss": 2.163, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00029465666037798935, |
|
"loss": 2.1673, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002935162878227975, |
|
"loss": 2.1737, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0002923720108560964, |
|
"loss": 2.1412, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.00029122387725275244, |
|
"loss": 2.1455, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00029007193494865103, |
|
"loss": 2.1477, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00028891623203869523, |
|
"loss": 2.1486, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0002877568167747975, |
|
"loss": 2.1703, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.0002865937375638654, |
|
"loss": 2.1819, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.0002854270429657805, |
|
"loss": 2.1564, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0002842567816913708, |
|
"loss": 2.1723, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.10498046875, |
|
"learning_rate": 0.0002830830026003773, |
|
"loss": 2.1585, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.0002819057546994135, |
|
"loss": 2.1758, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.00028072508713992007, |
|
"loss": 2.1637, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00027954104921611194, |
|
"loss": 2.1478, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.00027835369036292087, |
|
"loss": 2.1585, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.000277163060153931, |
|
"loss": 2.1481, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0002759692082993095, |
|
"loss": 2.1636, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00027477218464373076, |
|
"loss": 2.1594, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.0002735720391642956, |
|
"loss": 2.153, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.0002723688219684443, |
|
"loss": 2.1666, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.00027116258329186514, |
|
"loss": 2.1728, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002699533734963964, |
|
"loss": 2.158, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 0.000268741243067924, |
|
"loss": 2.1384, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10009765625, |
|
"learning_rate": 0.00026752624261427375, |
|
"loss": 2.1691, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0002663084228630982, |
|
"loss": 2.1531, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0002650878346597586, |
|
"loss": 2.1576, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.0002638645289652025, |
|
"loss": 2.1653, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.0002626385568538358, |
|
"loss": 2.15, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002614099695113901, |
|
"loss": 2.1738, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00026017881823278607, |
|
"loss": 2.1608, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00025894515441999156, |
|
"loss": 2.1914, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.00025770902957987556, |
|
"loss": 2.1659, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0002564704953220578, |
|
"loss": 2.1719, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002552296033567541, |
|
"loss": 2.1676, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002539864054926169, |
|
"loss": 2.165, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.0002527409536345728, |
|
"loss": 2.1561, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00025149329978165516, |
|
"loss": 2.1635, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 0.0002502434960248331, |
|
"loss": 2.1513, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 0.00024899159454483665, |
|
"loss": 2.1599, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.0002477376476099784, |
|
"loss": 2.1674, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00024648170757397055, |
|
"loss": 2.14, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00024522382687374, |
|
"loss": 2.1519, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.0002439640580272384, |
|
"loss": 2.1622, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0002427024536312496, |
|
"loss": 2.1535, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.10302734375, |
|
"learning_rate": 0.00024143906635919383, |
|
"loss": 2.1383, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.00024017394895892838, |
|
"loss": 2.1282, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.00023890715425054545, |
|
"loss": 2.1554, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10400390625, |
|
"learning_rate": 0.0002376387351241666, |
|
"loss": 2.157, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 0.00023636874453773475, |
|
"loss": 2.1471, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 0.00023509723551480325, |
|
"loss": 2.1383, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.1541337966918945, |
|
"eval_runtime": 188.5948, |
|
"eval_samples_per_second": 25.785, |
|
"eval_steps_per_second": 3.224, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.00023382426114232162, |
|
"loss": 2.1347, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.00023254987456841956, |
|
"loss": 2.1403, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.107421875, |
|
"learning_rate": 0.00023127412900018782, |
|
"loss": 2.1468, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.10498046875, |
|
"learning_rate": 0.00022999707770145653, |
|
"loss": 2.1267, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.1064453125, |
|
"learning_rate": 0.0002287187739905717, |
|
"loss": 2.1406, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.10595703125, |
|
"learning_rate": 0.00022743927123816899, |
|
"loss": 2.1304, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.00022615862286494537, |
|
"loss": 2.1199, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.00022487688233942862, |
|
"loss": 2.1216, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 0.00022359410317574548, |
|
"loss": 2.1188, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.00022231033893138668, |
|
"loss": 2.123, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 0.0002210256432049714, |
|
"loss": 2.1401, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.0002197400696340091, |
|
"loss": 2.1212, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.00021845367189266042, |
|
"loss": 2.1297, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.107421875, |
|
"learning_rate": 0.0002171665036894959, |
|
"loss": 2.1237, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00021587861876525377, |
|
"loss": 2.1281, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 0.00021459007089059625, |
|
"loss": 2.0898, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.0002133009138638645, |
|
"loss": 2.1212, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 0.00021201120150883234, |
|
"loss": 2.1285, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.00021072098767245932, |
|
"loss": 2.1302, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 0.00020943032622264238, |
|
"loss": 2.1079, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00020813927104596666, |
|
"loss": 2.1353, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 0.0002068478760454562, |
|
"loss": 2.1402, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.1064453125, |
|
"learning_rate": 0.0002055561951383227, |
|
"loss": 2.1276, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.00020426428225371496, |
|
"loss": 2.1241, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.00020297219133046714, |
|
"loss": 2.1255, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.0002016799763148467, |
|
"loss": 2.1191, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 0.00020038769115830198, |
|
"loss": 2.1308, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00019909538981521002, |
|
"loss": 2.1422, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 0.00019780312624062326, |
|
"loss": 2.1358, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.00019651095438801775, |
|
"loss": 2.1292, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.00019521892820703975, |
|
"loss": 2.1308, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.0001939271016412536, |
|
"loss": 2.1339, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00019263552862588948, |
|
"loss": 2.126, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.1103515625, |
|
"learning_rate": 0.00019134426308559162, |
|
"loss": 2.1325, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 0.00019005335893216665, |
|
"loss": 2.1302, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.0001887628700623332, |
|
"loss": 2.125, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.000187472850355471, |
|
"loss": 2.1217, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.1103515625, |
|
"learning_rate": 0.00018618335367137195, |
|
"loss": 2.147, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.0001848944338479909, |
|
"loss": 2.1437, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 0.00018360614469919835, |
|
"loss": 2.117, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.1162109375, |
|
"learning_rate": 0.0001823185400125333, |
|
"loss": 2.1119, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.00018103167354695756, |
|
"loss": 2.1242, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00017974559903061172, |
|
"loss": 2.115, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.11474609375, |
|
"learning_rate": 0.00017846037015857127, |
|
"loss": 2.1332, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00017717604059060518, |
|
"loss": 2.1049, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 0.0001758926639489354, |
|
"loss": 2.1246, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00017461029381599832, |
|
"loss": 2.1229, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.11083984375, |
|
"learning_rate": 0.00017332898373220707, |
|
"loss": 2.1136, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.1103515625, |
|
"learning_rate": 0.000172048787193717, |
|
"loss": 2.1204, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00017076975765019134, |
|
"loss": 2.1239, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.1103515625, |
|
"learning_rate": 0.00016949194850257002, |
|
"loss": 2.1308, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00016821541310084006, |
|
"loss": 2.1219, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00016694020474180814, |
|
"loss": 2.1299, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.115234375, |
|
"learning_rate": 0.00016566637666687547, |
|
"loss": 2.127, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00016439398205981472, |
|
"loss": 2.1255, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00016312307404454967, |
|
"loss": 2.1149, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.0001618537056829373, |
|
"loss": 2.1105, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00016058592997255215, |
|
"loss": 2.1477, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00015931979984447385, |
|
"loss": 2.13, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00015805536816107703, |
|
"loss": 2.1188, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00015679268771382428, |
|
"loss": 2.1194, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00015553181122106234, |
|
"loss": 2.1449, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00015427279132582055, |
|
"loss": 2.122, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00015301568059361323, |
|
"loss": 2.117, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.1103515625, |
|
"learning_rate": 0.00015176053151024502, |
|
"loss": 2.1244, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.0001505073964796194, |
|
"loss": 2.1206, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.000149256327821551, |
|
"loss": 2.1345, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.00014800737776958097, |
|
"loss": 2.1259, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00014676059846879615, |
|
"loss": 2.125, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.11474609375, |
|
"learning_rate": 0.00014551604197365222, |
|
"loss": 2.124, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.0001442737602458001, |
|
"loss": 2.1176, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.0001430338051519165, |
|
"loss": 2.1208, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.1162109375, |
|
"learning_rate": 0.00014179622846153872, |
|
"loss": 2.1309, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.0001405610818449027, |
|
"loss": 2.1126, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.0001393284168707864, |
|
"loss": 2.1075, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.0001380982850043562, |
|
"loss": 2.1107, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00013687073760501828, |
|
"loss": 2.1447, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00013564582592427444, |
|
"loss": 2.1151, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.11669921875, |
|
"learning_rate": 0.00013442360110358224, |
|
"loss": 2.1212, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00013320411417221974, |
|
"loss": 2.1249, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.0001319874160451551, |
|
"loss": 2.1384, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00013077355752092061, |
|
"loss": 2.1235, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00012956258927949196, |
|
"loss": 2.1457, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00012835456188017226, |
|
"loss": 2.1232, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00012714952575948102, |
|
"loss": 2.1135, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.00012594753122904858, |
|
"loss": 2.1268, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00012474862847351527, |
|
"loss": 2.1204, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00012355286754843654, |
|
"loss": 2.1317, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00012236029837819264, |
|
"loss": 2.1233, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00012117097075390449, |
|
"loss": 2.1264, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00011998493433135474, |
|
"loss": 2.1372, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.00011880223862891462, |
|
"loss": 2.1296, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.00011762293302547649, |
|
"loss": 2.1252, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 0.00011644706675839232, |
|
"loss": 2.1107, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.1142578125, |
|
"learning_rate": 0.00011527468892141785, |
|
"loss": 2.1107, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.1162109375, |
|
"learning_rate": 0.00011410584846266266, |
|
"loss": 2.1142, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.0001129405941825471, |
|
"loss": 2.1143, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.11572265625, |
|
"learning_rate": 0.00011177897473176413, |
|
"loss": 2.1129, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 0.00011062103860924873, |
|
"loss": 2.1186, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.11474609375, |
|
"learning_rate": 0.00010946683416015264, |
|
"loss": 2.1213, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00010831640957382601, |
|
"loss": 2.1192, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 0.00010716981288180526, |
|
"loss": 2.1373, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.11572265625, |
|
"learning_rate": 0.000106027091955808, |
|
"loss": 2.1416, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00010488829450573434, |
|
"loss": 2.1381, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.11474609375, |
|
"learning_rate": 0.0001037534680776744, |
|
"loss": 2.1228, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 0.00010262266005192399, |
|
"loss": 2.1245, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 0.00010149591764100586, |
|
"loss": 2.1272, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.11328125, |
|
"learning_rate": 0.00010037328788769884, |
|
"loss": 2.1265, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 9.925481766307341e-05, |
|
"loss": 2.1364, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.1162109375, |
|
"learning_rate": 9.814055366453523e-05, |
|
"loss": 2.1193, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.1142578125, |
|
"learning_rate": 9.703054241387499e-05, |
|
"loss": 2.1262, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 9.592483025532652e-05, |
|
"loss": 2.144, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 9.48234633536316e-05, |
|
"loss": 2.1294, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 9.372648769211258e-05, |
|
"loss": 2.1113, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 9.263394907075244e-05, |
|
"loss": 2.1247, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.1142578125, |
|
"learning_rate": 9.154589310428288e-05, |
|
"loss": 2.1161, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.1142578125, |
|
"learning_rate": 9.046236522027938e-05, |
|
"loss": 2.13, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 8.938341065726508e-05, |
|
"loss": 2.1218, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 8.830907446282162e-05, |
|
"loss": 2.1196, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.115234375, |
|
"learning_rate": 8.723940149170853e-05, |
|
"loss": 2.132, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.1171875, |
|
"learning_rate": 8.617443640399056e-05, |
|
"loss": 2.12, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.11279296875, |
|
"learning_rate": 8.51142236631727e-05, |
|
"loss": 2.1178, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.1142578125, |
|
"learning_rate": 8.405880753434434e-05, |
|
"loss": 2.1231, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.11474609375, |
|
"learning_rate": 8.300823208233062e-05, |
|
"loss": 2.1259, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 8.196254116985303e-05, |
|
"loss": 2.1061, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 8.0921778455698e-05, |
|
"loss": 2.1365, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 7.988598739289408e-05, |
|
"loss": 2.1183, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 7.885521122689753e-05, |
|
"loss": 2.1135, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.11181640625, |
|
"learning_rate": 7.782949299378724e-05, |
|
"loss": 2.1114, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 7.68088755184673e-05, |
|
"loss": 2.1381, |
|
"step": 2000 |
|
}, |
|
    {
      "epoch": 1.48,
      "grad_norm": 0.1162109375,
      "learning_rate": 7.579340141287965e-05,
      "loss": 2.1033,
      "step": 2005
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.11474609375,
      "learning_rate": 7.478311307422456e-05,
      "loss": 2.1169,
      "step": 2010
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.11376953125,
      "learning_rate": 7.377805268319076e-05,
      "loss": 2.1155,
      "step": 2015
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.1162109375,
      "learning_rate": 7.27782622021939e-05,
      "loss": 2.1077,
      "step": 2020
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.11376953125,
      "learning_rate": 7.178378337362519e-05,
      "loss": 2.1162,
      "step": 2025
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.11279296875,
      "learning_rate": 7.079465771810828e-05,
      "loss": 2.1294,
      "step": 2030
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.11083984375,
      "learning_rate": 6.981092653276547e-05,
      "loss": 2.1204,
      "step": 2035
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.1142578125,
      "learning_rate": 6.88326308894941e-05,
      "loss": 2.1197,
      "step": 2040
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.11279296875,
      "learning_rate": 6.78598116332513e-05,
      "loss": 2.1183,
      "step": 2045
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.11181640625,
      "learning_rate": 6.68925093803489e-05,
      "loss": 2.1259,
      "step": 2050
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.11474609375,
      "learning_rate": 6.593076451675734e-05,
      "loss": 2.1203,
      "step": 2055
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.11376953125,
      "learning_rate": 6.497461719642003e-05,
      "loss": 2.1051,
      "step": 2060
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.11328125,
      "learning_rate": 6.402410733957627e-05,
      "loss": 2.1393,
      "step": 2065
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.11572265625,
      "learning_rate": 6.307927463109504e-05,
      "loss": 2.131,
      "step": 2070
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.115234375,
      "learning_rate": 6.214015851881788e-05,
      "loss": 2.1138,
      "step": 2075
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.1142578125,
      "learning_rate": 6.120679821191193e-05,
      "loss": 2.1361,
      "step": 2080
    },
    {
      "epoch": 1.54,
      "grad_norm": 0.115234375,
      "learning_rate": 6.027923267923279e-05,
      "loss": 2.1242,
      "step": 2085
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.11376953125,
      "learning_rate": 5.9357500647697786e-05,
      "loss": 2.1396,
      "step": 2090
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.1142578125,
      "learning_rate": 5.8441640600668924e-05,
      "loss": 2.1037,
      "step": 2095
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.1123046875,
      "learning_rate": 5.75316907763459e-05,
      "loss": 2.1335,
      "step": 2100
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.111328125,
      "learning_rate": 5.6627689166170364e-05,
      "loss": 2.1064,
      "step": 2105
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.111328125,
      "learning_rate": 5.5729673513238814e-05,
      "loss": 2.1039,
      "step": 2110
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.11279296875,
      "learning_rate": 5.4837681310727464e-05,
      "loss": 2.1427,
      "step": 2115
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.1142578125,
      "learning_rate": 5.395174980032645e-05,
      "loss": 2.1386,
      "step": 2120
    },
    {
      "epoch": 1.57,
      "grad_norm": 0.1142578125,
      "learning_rate": 5.307191597068531e-05,
      "loss": 2.124,
      "step": 2125
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.11376953125,
      "learning_rate": 5.2198216555868206e-05,
      "loss": 2.1275,
      "step": 2130
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.1142578125,
      "learning_rate": 5.133068803382073e-05,
      "loss": 2.1182,
      "step": 2135
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.11279296875,
      "learning_rate": 5.046936662484658e-05,
      "loss": 2.1199,
      "step": 2140
    },
    {
      "epoch": 1.59,
      "grad_norm": 0.11572265625,
      "learning_rate": 4.9614288290095467e-05,
      "loss": 2.1488,
      "step": 2145
    },
    {
      "epoch": 1.59,
      "grad_norm": 0.11376953125,
      "learning_rate": 4.8765488730061485e-05,
      "loss": 2.1171,
      "step": 2150
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.1142578125,
      "learning_rate": 4.792300338309288e-05,
      "loss": 2.1305,
      "step": 2155
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.11279296875,
      "learning_rate": 4.70868674239124e-05,
      "loss": 2.1236,
      "step": 2160
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.115234375,
      "learning_rate": 4.625711576214831e-05,
      "loss": 2.1145,
      "step": 2165
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.1162109375,
      "learning_rate": 4.543378304087746e-05,
      "loss": 2.1362,
      "step": 2170
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.11572265625,
      "learning_rate": 4.461690363517857e-05,
      "loss": 2.1243,
      "step": 2175
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.1123046875,
      "learning_rate": 4.380651165069707e-05,
      "loss": 2.1126,
      "step": 2180
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.11328125,
      "learning_rate": 4.3002640922221084e-05,
      "loss": 2.1095,
      "step": 2185
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.115234375,
      "learning_rate": 4.220532501226902e-05,
      "loss": 2.1351,
      "step": 2190
    },
    {
      "epoch": 1.62,
      "grad_norm": 0.1123046875,
      "learning_rate": 4.141459720968792e-05,
      "loss": 2.1225,
      "step": 2195
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.115234375,
      "learning_rate": 4.0630490528264196e-05,
      "loss": 2.1168,
      "step": 2200
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.11376953125,
      "learning_rate": 3.985303770534459e-05,
      "loss": 2.1339,
      "step": 2205
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.1142578125,
      "learning_rate": 3.908227120046983e-05,
      "loss": 2.1018,
      "step": 2210
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.11328125,
      "learning_rate": 3.831822319401916e-05,
      "loss": 2.1128,
      "step": 2215
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.11474609375,
      "learning_rate": 3.756092558586694e-05,
      "loss": 2.1124,
      "step": 2220
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.11328125,
      "learning_rate": 3.681040999405079e-05,
      "loss": 2.1334,
      "step": 2225
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.1142578125,
      "learning_rate": 3.606670775345116e-05,
      "loss": 2.1247,
      "step": 2230
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.11328125,
      "learning_rate": 3.532984991448356e-05,
      "loss": 2.1211,
      "step": 2235
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.1162109375,
      "learning_rate": 3.459986724180189e-05,
      "loss": 2.1455,
      "step": 2240
    },
    {
      "epoch": 1.66,
      "grad_norm": 0.115234375,
      "learning_rate": 3.387679021301406e-05,
      "loss": 2.1305,
      "step": 2245
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.11328125,
      "learning_rate": 3.316064901740934e-05,
      "loss": 2.1421,
      "step": 2250
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.11279296875,
      "learning_rate": 3.245147355469822e-05,
      "loss": 2.1317,
      "step": 2255
    },
    {
      "epoch": 1.67,
      "grad_norm": 0.11279296875,
      "learning_rate": 3.174929343376374e-05,
      "loss": 2.1087,
      "step": 2260
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.119140625,
      "learning_rate": 3.105413797142576e-05,
      "loss": 2.1278,
      "step": 2265
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.1123046875,
      "learning_rate": 3.0366036191216274e-05,
      "loss": 2.1105,
      "step": 2270
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.11376953125,
      "learning_rate": 2.9685016822168287e-05,
      "loss": 2.1215,
      "step": 2275
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.11328125,
      "learning_rate": 2.9011108297615908e-05,
      "loss": 2.1419,
      "step": 2280
    },
    {
      "epoch": 1.69,
      "grad_norm": 0.11279296875,
      "learning_rate": 2.834433875400755e-05,
      "loss": 2.1307,
      "step": 2285
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.11376953125,
      "learning_rate": 2.768473602973083e-05,
      "loss": 2.1006,
      "step": 2290
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.7032327663950675e-05,
      "loss": 2.1167,
      "step": 2295
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.11328125,
      "learning_rate": 2.6387140895459284e-05,
      "loss": 2.1192,
      "step": 2300
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.11181640625,
      "learning_rate": 2.5749202661538972e-05,
      "loss": 2.1193,
      "step": 2305
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.511853959683752e-05,
      "loss": 2.119,
      "step": 2310
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.11328125,
      "learning_rate": 2.4495178032255918e-05,
      "loss": 2.1334,
      "step": 2315
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.11328125,
      "learning_rate": 2.3879143993849474e-05,
      "loss": 2.1016,
      "step": 2320
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.11279296875,
      "learning_rate": 2.3270463201740665e-05,
      "loss": 2.1279,
      "step": 2325
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.11376953125,
      "learning_rate": 2.2669161069045863e-05,
      "loss": 2.1051,
      "step": 2330
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.2075262700813747e-05,
      "loss": 2.1403,
      "step": 2335
    },
    {
      "epoch": 1.73,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.148879289297756e-05,
      "loss": 2.1177,
      "step": 2340
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.1181640625,
      "learning_rate": 2.0909776131319548e-05,
      "loss": 2.1135,
      "step": 2345
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.1142578125,
      "learning_rate": 2.0338236590448978e-05,
      "loss": 2.1412,
      "step": 2350
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.11376953125,
      "learning_rate": 1.9774198132792353e-05,
      "loss": 2.1297,
      "step": 2355
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.1142578125,
      "learning_rate": 1.9217684307597806e-05,
      "loss": 2.1109,
      "step": 2360
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.11376953125,
      "learning_rate": 1.866871834995112e-05,
      "loss": 2.1333,
      "step": 2365
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.11474609375,
      "learning_rate": 1.8127323179806234e-05,
      "loss": 2.1076,
      "step": 2370
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.11181640625,
      "learning_rate": 1.7593521401027967e-05,
      "loss": 2.1145,
      "step": 2375
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.7067335300448506e-05,
      "loss": 2.1214,
      "step": 2380
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.11572265625,
      "learning_rate": 1.654878684693677e-05,
      "loss": 2.1398,
      "step": 2385
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.1142578125,
      "learning_rate": 1.6037897690481075e-05,
      "loss": 2.0997,
      "step": 2390
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.11572265625,
      "learning_rate": 1.5534689161285643e-05,
      "loss": 2.1291,
      "step": 2395
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.115234375,
      "learning_rate": 1.5039182268879504e-05,
      "loss": 2.1358,
      "step": 2400
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.4551397701239721e-05,
      "loss": 2.1091,
      "step": 2405
    },
    {
      "epoch": 1.78,
      "grad_norm": 0.11474609375,
      "learning_rate": 1.4071355823927424e-05,
      "loss": 2.1098,
      "step": 2410
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.115234375,
      "learning_rate": 1.3599076679237676e-05,
      "loss": 2.1321,
      "step": 2415
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.1123046875,
      "learning_rate": 1.3134579985362517e-05,
      "loss": 2.1004,
      "step": 2420
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.11328125,
      "learning_rate": 1.2677885135567979e-05,
      "loss": 2.1175,
      "step": 2425
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.11328125,
      "learning_rate": 1.2229011197384021e-05,
      "loss": 2.1235,
      "step": 2430
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.115234375,
      "learning_rate": 1.1787976911808773e-05,
      "loss": 2.1341,
      "step": 2435
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.11328125,
      "learning_rate": 1.1354800692525835e-05,
      "loss": 2.1321,
      "step": 2440
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.092950062513567e-05,
      "loss": 2.0928,
      "step": 2445
    },
    {
      "epoch": 1.81,
      "grad_norm": 0.11328125,
      "learning_rate": 1.0512094466400402e-05,
      "loss": 2.1177,
      "step": 2450
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.0102599643502508e-05,
      "loss": 2.1335,
      "step": 2455
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.11572265625,
      "learning_rate": 9.70103325331717e-06,
      "loss": 2.111,
      "step": 2460
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.11328125,
      "learning_rate": 9.307412061698428e-06,
      "loss": 2.1119,
      "step": 2465
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.11279296875,
      "learning_rate": 8.92175250277929e-06,
      "loss": 2.1151,
      "step": 2470
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.1123046875,
      "learning_rate": 8.54407067828551e-06,
      "loss": 2.1228,
      "step": 2475
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.115234375,
      "learning_rate": 8.174382356863365e-06,
      "loss": 2.1171,
      "step": 2480
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.11376953125,
      "learning_rate": 7.812702973421182e-06,
      "loss": 2.1102,
      "step": 2485
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.11572265625,
      "learning_rate": 7.4590476284852165e-06,
      "loss": 2.1014,
      "step": 2490
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.11328125,
      "learning_rate": 7.113431087568745e-06,
      "loss": 2.1109,
      "step": 2495
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.11474609375,
      "learning_rate": 6.775867780555989e-06,
      "loss": 2.1115,
      "step": 2500
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.11376953125,
      "learning_rate": 6.446371801099371e-06,
      "loss": 2.1103,
      "step": 2505
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.11328125,
      "learning_rate": 6.124956906031276e-06,
      "loss": 2.1203,
      "step": 2510
    },
    {
      "epoch": 1.86,
      "grad_norm": 0.1181640625,
      "learning_rate": 5.811636514789598e-06,
      "loss": 2.1227,
      "step": 2515
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.11328125,
      "learning_rate": 5.506423708857455e-06,
      "loss": 2.1251,
      "step": 2520
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.11376953125,
      "learning_rate": 5.209331231217052e-06,
      "loss": 2.1305,
      "step": 2525
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.11328125,
      "learning_rate": 4.920371485817632e-06,
      "loss": 2.1124,
      "step": 2530
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.11376953125,
      "learning_rate": 4.639556537057677e-06,
      "loss": 2.11,
      "step": 2535
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.11279296875,
      "learning_rate": 4.3668981092810365e-06,
      "loss": 2.1314,
      "step": 2540
    },
    {
      "epoch": 1.88,
      "grad_norm": 0.1142578125,
      "learning_rate": 4.102407586287571e-06,
      "loss": 2.1307,
      "step": 2545
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.11328125,
      "learning_rate": 3.846096010857791e-06,
      "loss": 2.1042,
      "step": 2550
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.1142578125,
      "learning_rate": 3.5979740842918995e-06,
      "loss": 2.1353,
      "step": 2555
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.11376953125,
      "learning_rate": 3.3580521659628106e-06,
      "loss": 2.1164,
      "step": 2560
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.11572265625,
      "learning_rate": 3.126340272883899e-06,
      "loss": 2.1268,
      "step": 2565
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.11376953125,
      "learning_rate": 2.902848079290488e-06,
      "loss": 2.119,
      "step": 2570
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.11474609375,
      "learning_rate": 2.687584916236241e-06,
      "loss": 2.1196,
      "step": 2575
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.1162109375,
      "learning_rate": 2.4805597712032946e-06,
      "loss": 2.1185,
      "step": 2580
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.1123046875,
      "learning_rate": 2.281781287727247e-06,
      "loss": 2.1372,
      "step": 2585
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.11279296875,
      "learning_rate": 2.091257765036092e-06,
      "loss": 2.1092,
      "step": 2590
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.11328125,
      "learning_rate": 1.908997157703918e-06,
      "loss": 2.1357,
      "step": 2595
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.11181640625,
      "learning_rate": 1.7350070753186176e-06,
      "loss": 2.1145,
      "step": 2600
    },
    {
      "epoch": 1.93,
      "grad_norm": 0.115234375,
      "learning_rate": 1.5692947821642324e-06,
      "loss": 2.114,
      "step": 2605
    },
    {
      "epoch": 1.93,
      "grad_norm": 0.1123046875,
      "learning_rate": 1.4118671969177265e-06,
      "loss": 2.1391,
      "step": 2610
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.1142578125,
      "learning_rate": 1.2627308923600644e-06,
      "loss": 2.1302,
      "step": 2615
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.11328125,
      "learning_rate": 1.1218920951018064e-06,
      "loss": 2.135,
      "step": 2620
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.115234375,
      "learning_rate": 9.893566853230951e-07,
      "loss": 2.1385,
      "step": 2625
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.111328125,
      "learning_rate": 8.651301965282077e-07,
      "loss": 2.1302,
      "step": 2630
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.11669921875,
      "learning_rate": 7.492178153145402e-07,
      "loss": 2.125,
      "step": 2635
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.11572265625,
      "learning_rate": 6.416243811559808e-07,
      "loss": 2.1311,
      "step": 2640
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.11279296875,
      "learning_rate": 5.423543862009384e-07,
      "loss": 2.1087,
      "step": 2645
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.11328125,
      "learning_rate": 4.5141197508475894e-07,
      "loss": 2.105,
      "step": 2650
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.115234375,
      "learning_rate": 3.6880094475664204e-07,
      "loss": 2.1256,
      "step": 2655
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.11328125,
      "learning_rate": 2.945247443212118e-07,
      "loss": 2.1204,
      "step": 2660
    },
    {
      "epoch": 1.97,
      "grad_norm": 0.11474609375,
      "learning_rate": 2.2858647489441e-07,
      "loss": 2.1264,
      "step": 2665
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.11279296875,
      "learning_rate": 1.7098888947404412e-07,
      "loss": 2.1144,
      "step": 2670
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.1142578125,
      "learning_rate": 1.217343928249237e-07,
      "loss": 2.0979,
      "step": 2675
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.1123046875,
      "learning_rate": 8.082504137836288e-08,
      "loss": 2.136,
      "step": 2680
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.11279296875,
      "learning_rate": 4.8262543146382345e-08,
      "loss": 2.1116,
      "step": 2685
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.11474609375,
      "learning_rate": 2.404825765034424e-08,
      "loss": 2.1157,
      "step": 2690
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.12255859375,
      "learning_rate": 8.183195864264192e-09,
      "loss": 2.1259,
      "step": 2695
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.1123046875,
      "learning_rate": 6.680201725117741e-10,
      "loss": 2.1135,
      "step": 2700
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.14662766456604,
      "eval_runtime": 188.6485,
      "eval_samples_per_second": 25.778,
      "eval_steps_per_second": 3.223,
      "step": 2702
    },
    {
      "epoch": 2.0,
      "step": 2702,
      "total_flos": 6.137045069867254e+17,
      "train_loss": 2.1601854102158,
      "train_runtime": 21984.2259,
      "train_samples_per_second": 7.864,
      "train_steps_per_second": 0.123
    }
  ],
  "logging_steps": 5,
  "max_steps": 2702,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "total_flos": 6.137045069867254e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}