{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 19.08256880733945,
"eval_steps": 500,
"global_step": 260,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.07339449541284404,
"grad_norm": 0.094066321849823,
"learning_rate": 3.846153846153847e-06,
"loss": 0.2373,
"step": 1
},
{
"epoch": 0.14678899082568808,
"grad_norm": 0.08788317441940308,
"learning_rate": 7.692307692307694e-06,
"loss": 0.2393,
"step": 2
},
{
"epoch": 0.22018348623853212,
"grad_norm": 0.08641067892313004,
"learning_rate": 1.153846153846154e-05,
"loss": 0.232,
"step": 3
},
{
"epoch": 0.29357798165137616,
"grad_norm": 0.0884113758802414,
"learning_rate": 1.5384615384615387e-05,
"loss": 0.2312,
"step": 4
},
{
"epoch": 0.3669724770642202,
"grad_norm": 0.09627583622932434,
"learning_rate": 1.923076923076923e-05,
"loss": 0.2347,
"step": 5
},
{
"epoch": 0.44036697247706424,
"grad_norm": 0.08840294182300568,
"learning_rate": 2.307692307692308e-05,
"loss": 0.2379,
"step": 6
},
{
"epoch": 0.5137614678899083,
"grad_norm": 0.08855913579463959,
"learning_rate": 2.6923076923076923e-05,
"loss": 0.255,
"step": 7
},
{
"epoch": 0.5871559633027523,
"grad_norm": 0.0873115062713623,
"learning_rate": 3.0769230769230774e-05,
"loss": 0.2322,
"step": 8
},
{
"epoch": 0.6605504587155964,
"grad_norm": 0.0857061967253685,
"learning_rate": 3.461538461538462e-05,
"loss": 0.239,
"step": 9
},
{
"epoch": 0.7339449541284404,
"grad_norm": 0.08705756813287735,
"learning_rate": 3.846153846153846e-05,
"loss": 0.2428,
"step": 10
},
{
"epoch": 0.8073394495412844,
"grad_norm": 0.07806955277919769,
"learning_rate": 4.230769230769231e-05,
"loss": 0.2312,
"step": 11
},
{
"epoch": 0.8807339449541285,
"grad_norm": 0.07232638448476791,
"learning_rate": 4.615384615384616e-05,
"loss": 0.2254,
"step": 12
},
{
"epoch": 0.9541284403669725,
"grad_norm": 0.061769310384988785,
"learning_rate": 5e-05,
"loss": 0.2281,
"step": 13
},
{
"epoch": 1.0275229357798166,
"grad_norm": 0.058718591928482056,
"learning_rate": 5.384615384615385e-05,
"loss": 0.2288,
"step": 14
},
{
"epoch": 1.1009174311926606,
"grad_norm": 0.06115482375025749,
"learning_rate": 5.769230769230769e-05,
"loss": 0.2279,
"step": 15
},
{
"epoch": 1.1743119266055047,
"grad_norm": 0.064550019800663,
"learning_rate": 6.153846153846155e-05,
"loss": 0.2235,
"step": 16
},
{
"epoch": 1.2477064220183487,
"grad_norm": 0.062019750475883484,
"learning_rate": 6.538461538461539e-05,
"loss": 0.2196,
"step": 17
},
{
"epoch": 1.3211009174311927,
"grad_norm": 0.07217342406511307,
"learning_rate": 6.923076923076924e-05,
"loss": 0.2195,
"step": 18
},
{
"epoch": 1.3944954128440368,
"grad_norm": 0.058537840843200684,
"learning_rate": 7.307692307692307e-05,
"loss": 0.2209,
"step": 19
},
{
"epoch": 1.4678899082568808,
"grad_norm": 0.04221680015325546,
"learning_rate": 7.692307692307693e-05,
"loss": 0.2225,
"step": 20
},
{
"epoch": 1.5412844036697249,
"grad_norm": 0.055857084691524506,
"learning_rate": 8.076923076923078e-05,
"loss": 0.2165,
"step": 21
},
{
"epoch": 1.614678899082569,
"grad_norm": 0.05376669764518738,
"learning_rate": 8.461538461538461e-05,
"loss": 0.2174,
"step": 22
},
{
"epoch": 1.688073394495413,
"grad_norm": 0.06512868404388428,
"learning_rate": 8.846153846153847e-05,
"loss": 0.2023,
"step": 23
},
{
"epoch": 1.761467889908257,
"grad_norm": 0.04625726118683815,
"learning_rate": 9.230769230769232e-05,
"loss": 0.2137,
"step": 24
},
{
"epoch": 1.834862385321101,
"grad_norm": 0.052789829671382904,
"learning_rate": 9.615384615384617e-05,
"loss": 0.2084,
"step": 25
},
{
"epoch": 1.908256880733945,
"grad_norm": 0.042677175253629684,
"learning_rate": 0.0001,
"loss": 0.2113,
"step": 26
},
{
"epoch": 1.981651376146789,
"grad_norm": 0.04695236310362816,
"learning_rate": 9.999549389283606e-05,
"loss": 0.2142,
"step": 27
},
{
"epoch": 2.055045871559633,
"grad_norm": 0.036158736795186996,
"learning_rate": 9.998197638354428e-05,
"loss": 0.2085,
"step": 28
},
{
"epoch": 2.128440366972477,
"grad_norm": 0.037386395037174225,
"learning_rate": 9.995944990857849e-05,
"loss": 0.2149,
"step": 29
},
{
"epoch": 2.2018348623853212,
"grad_norm": 0.04186835139989853,
"learning_rate": 9.992791852820709e-05,
"loss": 0.2107,
"step": 30
},
{
"epoch": 2.2752293577981653,
"grad_norm": 0.05842822790145874,
"learning_rate": 9.988738792578126e-05,
"loss": 0.194,
"step": 31
},
{
"epoch": 2.3486238532110093,
"grad_norm": 0.04445941001176834,
"learning_rate": 9.983786540671051e-05,
"loss": 0.2176,
"step": 32
},
{
"epoch": 2.4220183486238533,
"grad_norm": 0.0501200370490551,
"learning_rate": 9.977935989714595e-05,
"loss": 0.2016,
"step": 33
},
{
"epoch": 2.4954128440366974,
"grad_norm": 0.042810119688510895,
"learning_rate": 9.97118819423714e-05,
"loss": 0.2091,
"step": 34
},
{
"epoch": 2.5688073394495414,
"grad_norm": 0.03298516198992729,
"learning_rate": 9.96354437049027e-05,
"loss": 0.2127,
"step": 35
},
{
"epoch": 2.6422018348623855,
"grad_norm": 0.038216155022382736,
"learning_rate": 9.955005896229543e-05,
"loss": 0.2003,
"step": 36
},
{
"epoch": 2.7155963302752295,
"grad_norm": 0.05605754256248474,
"learning_rate": 9.945574310466159e-05,
"loss": 0.1987,
"step": 37
},
{
"epoch": 2.7889908256880735,
"grad_norm": 0.04308450222015381,
"learning_rate": 9.935251313189564e-05,
"loss": 0.2102,
"step": 38
},
{
"epoch": 2.8623853211009176,
"grad_norm": 0.037231989204883575,
"learning_rate": 9.924038765061042e-05,
"loss": 0.2046,
"step": 39
},
{
"epoch": 2.9357798165137616,
"grad_norm": 0.03728229179978371,
"learning_rate": 9.911938687078324e-05,
"loss": 0.2103,
"step": 40
},
{
"epoch": 3.0091743119266057,
"grad_norm": 0.034406475722789764,
"learning_rate": 9.898953260211338e-05,
"loss": 0.2118,
"step": 41
},
{
"epoch": 3.0825688073394497,
"grad_norm": 0.03245306760072708,
"learning_rate": 9.885084825009086e-05,
"loss": 0.1988,
"step": 42
},
{
"epoch": 3.1559633027522938,
"grad_norm": 0.050477948039770126,
"learning_rate": 9.870335881177774e-05,
"loss": 0.2027,
"step": 43
},
{
"epoch": 3.229357798165138,
"grad_norm": 0.037724193185567856,
"learning_rate": 9.85470908713026e-05,
"loss": 0.2019,
"step": 44
},
{
"epoch": 3.302752293577982,
"grad_norm": 0.037339672446250916,
"learning_rate": 9.838207259506891e-05,
"loss": 0.2106,
"step": 45
},
{
"epoch": 3.376146788990826,
"grad_norm": 0.037933845072984695,
"learning_rate": 9.820833372667812e-05,
"loss": 0.199,
"step": 46
},
{
"epoch": 3.44954128440367,
"grad_norm": 0.03609978035092354,
"learning_rate": 9.802590558156862e-05,
"loss": 0.2059,
"step": 47
},
{
"epoch": 3.522935779816514,
"grad_norm": 0.04476189985871315,
"learning_rate": 9.783482104137127e-05,
"loss": 0.2152,
"step": 48
},
{
"epoch": 3.5963302752293576,
"grad_norm": 0.041969411075115204,
"learning_rate": 9.763511454798268e-05,
"loss": 0.2117,
"step": 49
},
{
"epoch": 3.669724770642202,
"grad_norm": 0.04141020029783249,
"learning_rate": 9.742682209735727e-05,
"loss": 0.1912,
"step": 50
},
{
"epoch": 3.7431192660550456,
"grad_norm": 0.03470165282487869,
"learning_rate": 9.720998123301923e-05,
"loss": 0.1968,
"step": 51
},
{
"epoch": 3.81651376146789,
"grad_norm": 0.03589041531085968,
"learning_rate": 9.698463103929542e-05,
"loss": 0.2035,
"step": 52
},
{
"epoch": 3.8899082568807337,
"grad_norm": 0.03698645904660225,
"learning_rate": 9.675081213427076e-05,
"loss": 0.1979,
"step": 53
},
{
"epoch": 3.963302752293578,
"grad_norm": 0.03854459151625633,
"learning_rate": 9.650856666246693e-05,
"loss": 0.2,
"step": 54
},
{
"epoch": 4.036697247706422,
"grad_norm": 0.037219513207674026,
"learning_rate": 9.625793828724618e-05,
"loss": 0.1981,
"step": 55
},
{
"epoch": 4.110091743119266,
"grad_norm": 0.038294121623039246,
"learning_rate": 9.599897218294122e-05,
"loss": 0.1937,
"step": 56
},
{
"epoch": 4.18348623853211,
"grad_norm": 0.04259132966399193,
"learning_rate": 9.573171502671273e-05,
"loss": 0.1922,
"step": 57
},
{
"epoch": 4.256880733944954,
"grad_norm": 0.04465475678443909,
"learning_rate": 9.54562149901362e-05,
"loss": 0.1999,
"step": 58
},
{
"epoch": 4.330275229357798,
"grad_norm": 0.041249386966228485,
"learning_rate": 9.517252173051911e-05,
"loss": 0.1921,
"step": 59
},
{
"epoch": 4.4036697247706424,
"grad_norm": 0.04421278461813927,
"learning_rate": 9.48806863819507e-05,
"loss": 0.2039,
"step": 60
},
{
"epoch": 4.477064220183486,
"grad_norm": 0.05113544315099716,
"learning_rate": 9.458076154608515e-05,
"loss": 0.186,
"step": 61
},
{
"epoch": 4.5504587155963305,
"grad_norm": 0.04417051747441292,
"learning_rate": 9.42728012826605e-05,
"loss": 0.1985,
"step": 62
},
{
"epoch": 4.623853211009174,
"grad_norm": 0.04551684111356735,
"learning_rate": 9.395686109975474e-05,
"loss": 0.1925,
"step": 63
},
{
"epoch": 4.697247706422019,
"grad_norm": 0.05105452239513397,
"learning_rate": 9.363299794378073e-05,
"loss": 0.2009,
"step": 64
},
{
"epoch": 4.770642201834862,
"grad_norm": 0.05177849158644676,
"learning_rate": 9.330127018922194e-05,
"loss": 0.1931,
"step": 65
},
{
"epoch": 4.844036697247707,
"grad_norm": 0.0584421344101429,
"learning_rate": 9.296173762811085e-05,
"loss": 0.2095,
"step": 66
},
{
"epoch": 4.91743119266055,
"grad_norm": 0.052275680005550385,
"learning_rate": 9.261446145925167e-05,
"loss": 0.2074,
"step": 67
},
{
"epoch": 4.990825688073395,
"grad_norm": 0.052674662321805954,
"learning_rate": 9.225950427718975e-05,
"loss": 0.2029,
"step": 68
},
{
"epoch": 5.064220183486238,
"grad_norm": 0.06084563210606575,
"learning_rate": 9.189693006092907e-05,
"loss": 0.1875,
"step": 69
},
{
"epoch": 5.137614678899083,
"grad_norm": 0.0493173822760582,
"learning_rate": 9.152680416240059e-05,
"loss": 0.1983,
"step": 70
},
{
"epoch": 5.2110091743119265,
"grad_norm": 0.050358761101961136,
"learning_rate": 9.114919329468282e-05,
"loss": 0.1989,
"step": 71
},
{
"epoch": 5.284403669724771,
"grad_norm": 0.050705138593912125,
"learning_rate": 9.076416551997721e-05,
"loss": 0.1929,
"step": 72
},
{
"epoch": 5.3577981651376145,
"grad_norm": 0.06104021519422531,
"learning_rate": 9.037179023734035e-05,
"loss": 0.1989,
"step": 73
},
{
"epoch": 5.431192660550459,
"grad_norm": 0.05947330221533775,
"learning_rate": 8.997213817017507e-05,
"loss": 0.2026,
"step": 74
},
{
"epoch": 5.504587155963303,
"grad_norm": 0.05319764092564583,
"learning_rate": 8.95652813534831e-05,
"loss": 0.2011,
"step": 75
},
{
"epoch": 5.577981651376147,
"grad_norm": 0.05301457270979881,
"learning_rate": 8.915129312088112e-05,
"loss": 0.1983,
"step": 76
},
{
"epoch": 5.651376146788991,
"grad_norm": 0.058974914252758026,
"learning_rate": 8.873024809138272e-05,
"loss": 0.1861,
"step": 77
},
{
"epoch": 5.724770642201835,
"grad_norm": 0.0584375262260437,
"learning_rate": 8.83022221559489e-05,
"loss": 0.1924,
"step": 78
},
{
"epoch": 5.798165137614679,
"grad_norm": 0.06012069061398506,
"learning_rate": 8.786729246380901e-05,
"loss": 0.1952,
"step": 79
},
{
"epoch": 5.871559633027523,
"grad_norm": 0.06680011004209518,
"learning_rate": 8.742553740855506e-05,
"loss": 0.1929,
"step": 80
},
{
"epoch": 5.944954128440367,
"grad_norm": 0.06496898084878922,
"learning_rate": 8.697703661401186e-05,
"loss": 0.1826,
"step": 81
},
{
"epoch": 6.018348623853211,
"grad_norm": 0.06297076493501663,
"learning_rate": 8.652187091988517e-05,
"loss": 0.1846,
"step": 82
},
{
"epoch": 6.091743119266055,
"grad_norm": 0.06112602353096008,
"learning_rate": 8.606012236719073e-05,
"loss": 0.1954,
"step": 83
},
{
"epoch": 6.165137614678899,
"grad_norm": 0.06689347326755524,
"learning_rate": 8.559187418346703e-05,
"loss": 0.1937,
"step": 84
},
{
"epoch": 6.238532110091743,
"grad_norm": 0.06930436193943024,
"learning_rate": 8.511721076777389e-05,
"loss": 0.1808,
"step": 85
},
{
"epoch": 6.3119266055045875,
"grad_norm": 0.06421869993209839,
"learning_rate": 8.463621767547998e-05,
"loss": 0.1876,
"step": 86
},
{
"epoch": 6.385321100917431,
"grad_norm": 0.07200642675161362,
"learning_rate": 8.414898160284208e-05,
"loss": 0.1946,
"step": 87
},
{
"epoch": 6.458715596330276,
"grad_norm": 0.07047700881958008,
"learning_rate": 8.365559037137852e-05,
"loss": 0.199,
"step": 88
},
{
"epoch": 6.532110091743119,
"grad_norm": 0.07283582538366318,
"learning_rate": 8.315613291203976e-05,
"loss": 0.1976,
"step": 89
},
{
"epoch": 6.605504587155964,
"grad_norm": 0.07760219275951385,
"learning_rate": 8.265069924917925e-05,
"loss": 0.1807,
"step": 90
},
{
"epoch": 6.678899082568807,
"grad_norm": 0.08349579572677612,
"learning_rate": 8.213938048432697e-05,
"loss": 0.1904,
"step": 91
},
{
"epoch": 6.752293577981652,
"grad_norm": 0.08077001571655273,
"learning_rate": 8.162226877976887e-05,
"loss": 0.1927,
"step": 92
},
{
"epoch": 6.825688073394495,
"grad_norm": 0.07862831652164459,
"learning_rate": 8.10994573419352e-05,
"loss": 0.189,
"step": 93
},
{
"epoch": 6.89908256880734,
"grad_norm": 0.08872492611408234,
"learning_rate": 8.057104040460062e-05,
"loss": 0.1907,
"step": 94
},
{
"epoch": 6.972477064220183,
"grad_norm": 0.07660839706659317,
"learning_rate": 8.003711321189895e-05,
"loss": 0.1878,
"step": 95
},
{
"epoch": 7.045871559633028,
"grad_norm": 0.08058074116706848,
"learning_rate": 7.949777200115616e-05,
"loss": 0.1946,
"step": 96
},
{
"epoch": 7.1192660550458715,
"grad_norm": 0.08907619118690491,
"learning_rate": 7.895311398554395e-05,
"loss": 0.2,
"step": 97
},
{
"epoch": 7.192660550458716,
"grad_norm": 0.08919303119182587,
"learning_rate": 7.840323733655778e-05,
"loss": 0.1741,
"step": 98
},
{
"epoch": 7.26605504587156,
"grad_norm": 0.0940893217921257,
"learning_rate": 7.784824116632197e-05,
"loss": 0.1919,
"step": 99
},
{
"epoch": 7.339449541284404,
"grad_norm": 0.11027345806360245,
"learning_rate": 7.728822550972523e-05,
"loss": 0.1921,
"step": 100
},
{
"epoch": 7.412844036697248,
"grad_norm": 0.11393953114748001,
"learning_rate": 7.672329130639005e-05,
"loss": 0.1892,
"step": 101
},
{
"epoch": 7.486238532110092,
"grad_norm": 0.10183316469192505,
"learning_rate": 7.615354038247888e-05,
"loss": 0.1852,
"step": 102
},
{
"epoch": 7.559633027522936,
"grad_norm": 0.1067890003323555,
"learning_rate": 7.557907543234051e-05,
"loss": 0.1891,
"step": 103
},
{
"epoch": 7.63302752293578,
"grad_norm": 0.10177016258239746,
"learning_rate": 7.500000000000001e-05,
"loss": 0.1897,
"step": 104
},
{
"epoch": 7.706422018348624,
"grad_norm": 0.13205887377262115,
"learning_rate": 7.441641846049556e-05,
"loss": 0.1751,
"step": 105
},
{
"epoch": 7.779816513761467,
"grad_norm": 0.09557172656059265,
"learning_rate": 7.38284360010654e-05,
"loss": 0.1808,
"step": 106
},
{
"epoch": 7.853211009174312,
"grad_norm": 0.10306868702173233,
"learning_rate": 7.323615860218843e-05,
"loss": 0.1828,
"step": 107
},
{
"epoch": 7.926605504587156,
"grad_norm": 0.12413720041513443,
"learning_rate": 7.263969301848188e-05,
"loss": 0.1848,
"step": 108
},
{
"epoch": 8.0,
"grad_norm": 0.09769123792648315,
"learning_rate": 7.203914675945929e-05,
"loss": 0.1872,
"step": 109
},
{
"epoch": 8.073394495412844,
"grad_norm": 0.08778978884220123,
"learning_rate": 7.143462807015271e-05,
"loss": 0.1802,
"step": 110
},
{
"epoch": 8.146788990825687,
"grad_norm": 0.09416276961565018,
"learning_rate": 7.082624591160201e-05,
"loss": 0.1859,
"step": 111
},
{
"epoch": 8.220183486238533,
"grad_norm": 0.09707635641098022,
"learning_rate": 7.021410994121525e-05,
"loss": 0.1819,
"step": 112
},
{
"epoch": 8.293577981651376,
"grad_norm": 0.09971417486667633,
"learning_rate": 6.959833049300377e-05,
"loss": 0.1783,
"step": 113
},
{
"epoch": 8.36697247706422,
"grad_norm": 0.12254350632429123,
"learning_rate": 6.897901855769483e-05,
"loss": 0.1876,
"step": 114
},
{
"epoch": 8.440366972477065,
"grad_norm": 0.1221860870718956,
"learning_rate": 6.835628576272638e-05,
"loss": 0.1904,
"step": 115
},
{
"epoch": 8.513761467889909,
"grad_norm": 0.11945543438196182,
"learning_rate": 6.773024435212678e-05,
"loss": 0.1801,
"step": 116
},
{
"epoch": 8.587155963302752,
"grad_norm": 0.14136166870594025,
"learning_rate": 6.710100716628344e-05,
"loss": 0.1781,
"step": 117
},
{
"epoch": 8.660550458715596,
"grad_norm": 0.155049666762352,
"learning_rate": 6.646868762160399e-05,
"loss": 0.1799,
"step": 118
},
{
"epoch": 8.73394495412844,
"grad_norm": 0.17453230917453766,
"learning_rate": 6.583339969007363e-05,
"loss": 0.1799,
"step": 119
},
{
"epoch": 8.807339449541285,
"grad_norm": 0.14369648694992065,
"learning_rate": 6.519525787871235e-05,
"loss": 0.1879,
"step": 120
},
{
"epoch": 8.880733944954128,
"grad_norm": 0.12149137258529663,
"learning_rate": 6.455437720893564e-05,
"loss": 0.1784,
"step": 121
},
{
"epoch": 8.954128440366972,
"grad_norm": 0.13801580667495728,
"learning_rate": 6.391087319582264e-05,
"loss": 0.1838,
"step": 122
},
{
"epoch": 9.027522935779816,
"grad_norm": 0.11812577396631241,
"learning_rate": 6.326486182729504e-05,
"loss": 0.1718,
"step": 123
},
{
"epoch": 9.100917431192661,
"grad_norm": 0.12246193736791611,
"learning_rate": 6.261645954321109e-05,
"loss": 0.1744,
"step": 124
},
{
"epoch": 9.174311926605505,
"grad_norm": 0.11642672121524811,
"learning_rate": 6.19657832143779e-05,
"loss": 0.1729,
"step": 125
},
{
"epoch": 9.247706422018348,
"grad_norm": 0.11366164684295654,
"learning_rate": 6.131295012148612e-05,
"loss": 0.1749,
"step": 126
},
{
"epoch": 9.321100917431192,
"grad_norm": 0.12722568213939667,
"learning_rate": 6.06580779339708e-05,
"loss": 0.1806,
"step": 127
},
{
"epoch": 9.394495412844037,
"grad_norm": 0.13534581661224365,
"learning_rate": 6.0001284688802226e-05,
"loss": 0.1782,
"step": 128
},
{
"epoch": 9.46788990825688,
"grad_norm": 0.13548819720745087,
"learning_rate": 5.9342688769210254e-05,
"loss": 0.1673,
"step": 129
},
{
"epoch": 9.541284403669724,
"grad_norm": 0.15632225573062897,
"learning_rate": 5.868240888334653e-05,
"loss": 0.1834,
"step": 130
},
{
"epoch": 9.614678899082568,
"grad_norm": 0.16248205304145813,
"learning_rate": 5.8020564042888015e-05,
"loss": 0.1821,
"step": 131
},
{
"epoch": 9.688073394495413,
"grad_norm": 0.16676151752471924,
"learning_rate": 5.735727354158581e-05,
"loss": 0.1751,
"step": 132
},
{
"epoch": 9.761467889908257,
"grad_norm": 0.17233224213123322,
"learning_rate": 5.669265693376309e-05,
"loss": 0.1708,
"step": 133
},
{
"epoch": 9.8348623853211,
"grad_norm": 0.15577177703380585,
"learning_rate": 5.602683401276615e-05,
"loss": 0.1717,
"step": 134
},
{
"epoch": 9.908256880733944,
"grad_norm": 0.17410804331302643,
"learning_rate": 5.5359924789372396e-05,
"loss": 0.1842,
"step": 135
},
{
"epoch": 9.98165137614679,
"grad_norm": 0.15566076338291168,
"learning_rate": 5.469204947015897e-05,
"loss": 0.1737,
"step": 136
},
{
"epoch": 10.055045871559633,
"grad_norm": 0.14896251261234283,
"learning_rate": 5.402332843583631e-05,
"loss": 0.1765,
"step": 137
},
{
"epoch": 10.128440366972477,
"grad_norm": 0.1541670709848404,
"learning_rate": 5.335388221955012e-05,
"loss": 0.1658,
"step": 138
},
{
"epoch": 10.20183486238532,
"grad_norm": 0.15539778769016266,
"learning_rate": 5.2683831485156074e-05,
"loss": 0.1656,
"step": 139
},
{
"epoch": 10.275229357798166,
"grad_norm": 0.16468144953250885,
"learning_rate": 5.201329700547076e-05,
"loss": 0.1783,
"step": 140
},
{
"epoch": 10.34862385321101,
"grad_norm": 0.16573026776313782,
"learning_rate": 5.134239964050307e-05,
"loss": 0.1603,
"step": 141
},
{
"epoch": 10.422018348623853,
"grad_norm": 0.18577492237091064,
"learning_rate": 5.0671260315669875e-05,
"loss": 0.1744,
"step": 142
},
{
"epoch": 10.495412844036696,
"grad_norm": 0.1860455870628357,
"learning_rate": 5e-05,
"loss": 0.1551,
"step": 143
},
{
"epoch": 10.568807339449542,
"grad_norm": 0.18118642270565033,
"learning_rate": 4.9328739684330137e-05,
"loss": 0.1653,
"step": 144
},
{
"epoch": 10.642201834862385,
"grad_norm": 0.2069067806005478,
"learning_rate": 4.865760035949695e-05,
"loss": 0.1737,
"step": 145
},
{
"epoch": 10.715596330275229,
"grad_norm": 0.22928771376609802,
"learning_rate": 4.798670299452926e-05,
"loss": 0.1743,
"step": 146
},
{
"epoch": 10.788990825688073,
"grad_norm": 0.19360584020614624,
"learning_rate": 4.731616851484392e-05,
"loss": 0.1691,
"step": 147
},
{
"epoch": 10.862385321100918,
"grad_norm": 0.19605328142642975,
"learning_rate": 4.6646117780449876e-05,
"loss": 0.1752,
"step": 148
},
{
"epoch": 10.935779816513762,
"grad_norm": 0.19604168832302094,
"learning_rate": 4.597667156416371e-05,
"loss": 0.1801,
"step": 149
},
{
"epoch": 11.009174311926605,
"grad_norm": 0.19816367328166962,
"learning_rate": 4.530795052984104e-05,
"loss": 0.172,
"step": 150
},
{
"epoch": 11.082568807339449,
"grad_norm": 0.18127745389938354,
"learning_rate": 4.4640075210627615e-05,
"loss": 0.1683,
"step": 151
},
{
"epoch": 11.155963302752294,
"grad_norm": 0.19333089888095856,
"learning_rate": 4.397316598723385e-05,
"loss": 0.1593,
"step": 152
},
{
"epoch": 11.229357798165138,
"grad_norm": 0.18506307899951935,
"learning_rate": 4.3307343066236935e-05,
"loss": 0.155,
"step": 153
},
{
"epoch": 11.302752293577981,
"grad_norm": 0.17834623157978058,
"learning_rate": 4.264272645841419e-05,
"loss": 0.1619,
"step": 154
},
{
"epoch": 11.376146788990825,
"grad_norm": 0.18679705262184143,
"learning_rate": 4.197943595711198e-05,
"loss": 0.1565,
"step": 155
},
{
"epoch": 11.44954128440367,
"grad_norm": 0.2451370358467102,
"learning_rate": 4.131759111665349e-05,
"loss": 0.1666,
"step": 156
},
{
"epoch": 11.522935779816514,
"grad_norm": 0.2141800969839096,
"learning_rate": 4.0657311230789764e-05,
"loss": 0.1557,
"step": 157
},
{
"epoch": 11.596330275229358,
"grad_norm": 0.24323846399784088,
"learning_rate": 3.9998715311197785e-05,
"loss": 0.167,
"step": 158
},
{
"epoch": 11.669724770642201,
"grad_norm": 0.2383449822664261,
"learning_rate": 3.934192206602921e-05,
"loss": 0.1684,
"step": 159
},
{
"epoch": 11.743119266055047,
"grad_norm": 0.22943173348903656,
"learning_rate": 3.86870498785139e-05,
"loss": 0.158,
"step": 160
},
{
"epoch": 11.81651376146789,
"grad_norm": 0.22590626776218414,
"learning_rate": 3.803421678562213e-05,
"loss": 0.1653,
"step": 161
},
{
"epoch": 11.889908256880734,
"grad_norm": 0.2344842404127121,
"learning_rate": 3.738354045678891e-05,
"loss": 0.1611,
"step": 162
},
{
"epoch": 11.963302752293577,
"grad_norm": 0.24516969919204712,
"learning_rate": 3.673513817270497e-05,
"loss": 0.1691,
"step": 163
},
{
"epoch": 12.036697247706423,
"grad_norm": 0.23740142583847046,
"learning_rate": 3.608912680417737e-05,
"loss": 0.1614,
"step": 164
},
{
"epoch": 12.110091743119266,
"grad_norm": 0.19846218824386597,
"learning_rate": 3.5445622791064356e-05,
"loss": 0.1507,
"step": 165
},
{
"epoch": 12.18348623853211,
"grad_norm": 0.2150259017944336,
"learning_rate": 3.480474212128766e-05,
"loss": 0.1587,
"step": 166
},
{
"epoch": 12.256880733944953,
"grad_norm": 0.2322087585926056,
"learning_rate": 3.4166600309926387e-05,
"loss": 0.1678,
"step": 167
},
{
"epoch": 12.330275229357799,
"grad_norm": 0.24851596355438232,
"learning_rate": 3.3531312378396026e-05,
"loss": 0.1563,
"step": 168
},
{
"epoch": 12.403669724770642,
"grad_norm": 0.2380513846874237,
"learning_rate": 3.289899283371657e-05,
"loss": 0.159,
"step": 169
},
{
"epoch": 12.477064220183486,
"grad_norm": 0.24537961184978485,
"learning_rate": 3.226975564787322e-05,
"loss": 0.1635,
"step": 170
},
{
"epoch": 12.55045871559633,
"grad_norm": 0.2758086919784546,
"learning_rate": 3.164371423727362e-05,
"loss": 0.1585,
"step": 171
},
{
"epoch": 12.623853211009175,
"grad_norm": 0.2760733962059021,
"learning_rate": 3.1020981442305184e-05,
"loss": 0.1582,
"step": 172
},
{
"epoch": 12.697247706422019,
"grad_norm": 0.24997705221176147,
"learning_rate": 3.0401669506996256e-05,
"loss": 0.1586,
"step": 173
},
{
"epoch": 12.770642201834862,
"grad_norm": 0.23340976238250732,
"learning_rate": 2.978589005878476e-05,
"loss": 0.1475,
"step": 174
},
{
"epoch": 12.844036697247706,
"grad_norm": 0.24687667191028595,
"learning_rate": 2.917375408839803e-05,
"loss": 0.1545,
"step": 175
},
{
"epoch": 12.917431192660551,
"grad_norm": 0.2573287785053253,
"learning_rate": 2.8565371929847284e-05,
"loss": 0.1594,
"step": 176
},
{
"epoch": 12.990825688073395,
"grad_norm": 0.23869659006595612,
"learning_rate": 2.79608532405407e-05,
"loss": 0.1478,
"step": 177
},
{
"epoch": 13.064220183486238,
"grad_norm": 0.21892689168453217,
"learning_rate": 2.7360306981518146e-05,
"loss": 0.1537,
"step": 178
},
{
"epoch": 13.137614678899082,
"grad_norm": 0.25160327553749084,
"learning_rate": 2.6763841397811573e-05,
"loss": 0.1461,
"step": 179
},
{
"epoch": 13.211009174311927,
"grad_norm": 0.22473768889904022,
"learning_rate": 2.6171563998934605e-05,
"loss": 0.157,
"step": 180
},
{
"epoch": 13.284403669724771,
"grad_norm": 0.271699994802475,
"learning_rate": 2.5583581539504464e-05,
"loss": 0.1481,
"step": 181
},
{
"epoch": 13.357798165137615,
"grad_norm": 0.2539384961128235,
"learning_rate": 2.500000000000001e-05,
"loss": 0.1425,
"step": 182
},
{
"epoch": 13.431192660550458,
"grad_norm": 0.25691837072372437,
"learning_rate": 2.442092456765951e-05,
"loss": 0.1496,
"step": 183
},
{
"epoch": 13.504587155963304,
"grad_norm": 0.27063509821891785,
"learning_rate": 2.3846459617521128e-05,
"loss": 0.15,
"step": 184
},
{
"epoch": 13.577981651376147,
"grad_norm": 0.2871812582015991,
"learning_rate": 2.3276708693609943e-05,
"loss": 0.1449,
"step": 185
},
{
"epoch": 13.65137614678899,
"grad_norm": 0.28144845366477966,
"learning_rate": 2.2711774490274766e-05,
"loss": 0.1488,
"step": 186
},
{
"epoch": 13.724770642201834,
"grad_norm": 0.25536370277404785,
"learning_rate": 2.2151758833678045e-05,
"loss": 0.1499,
"step": 187
},
{
"epoch": 13.79816513761468,
"grad_norm": 0.276015043258667,
"learning_rate": 2.1596762663442218e-05,
"loss": 0.16,
"step": 188
},
{
"epoch": 13.871559633027523,
"grad_norm": 0.2981753945350647,
"learning_rate": 2.104688601445606e-05,
"loss": 0.1619,
"step": 189
},
{
"epoch": 13.944954128440367,
"grad_norm": 0.28739604353904724,
"learning_rate": 2.050222799884387e-05,
"loss": 0.143,
"step": 190
},
{
"epoch": 14.01834862385321,
"grad_norm": 0.2708568871021271,
"learning_rate": 1.996288678810105e-05,
"loss": 0.1489,
"step": 191
},
{
"epoch": 14.091743119266056,
"grad_norm": 0.2790358066558838,
"learning_rate": 1.942895959539939e-05,
"loss": 0.157,
"step": 192
},
{
"epoch": 14.1651376146789,
"grad_norm": 0.2205585092306137,
"learning_rate": 1.8900542658064807e-05,
"loss": 0.1438,
"step": 193
},
{
"epoch": 14.238532110091743,
"grad_norm": 0.23872052133083344,
"learning_rate": 1.837773122023114e-05,
"loss": 0.1447,
"step": 194
},
{
"epoch": 14.311926605504587,
"grad_norm": 0.2528257966041565,
"learning_rate": 1.7860619515673033e-05,
"loss": 0.1404,
"step": 195
},
{
"epoch": 14.385321100917432,
"grad_norm": 0.2546660900115967,
"learning_rate": 1.734930075082076e-05,
"loss": 0.1505,
"step": 196
},
{
"epoch": 14.458715596330276,
"grad_norm": 0.26501449942588806,
"learning_rate": 1.684386708796025e-05,
"loss": 0.1382,
"step": 197
},
{
"epoch": 14.53211009174312,
"grad_norm": 0.2743387520313263,
"learning_rate": 1.6344409628621484e-05,
"loss": 0.1436,
"step": 198
},
{
"epoch": 14.605504587155963,
"grad_norm": 0.26094284653663635,
"learning_rate": 1.585101839715792e-05,
"loss": 0.1425,
"step": 199
},
{
"epoch": 14.678899082568808,
"grad_norm": 0.2686833441257477,
"learning_rate": 1.536378232452003e-05,
"loss": 0.1393,
"step": 200
},
{
"epoch": 14.752293577981652,
"grad_norm": 0.29060205817222595,
"learning_rate": 1.4882789232226125e-05,
"loss": 0.1467,
"step": 201
},
{
"epoch": 14.825688073394495,
"grad_norm": 0.2699175477027893,
"learning_rate": 1.440812581653298e-05,
"loss": 0.1452,
"step": 202
},
{
"epoch": 14.899082568807339,
"grad_norm": 0.25926584005355835,
"learning_rate": 1.3939877632809278e-05,
"loss": 0.1425,
"step": 203
},
{
"epoch": 14.972477064220184,
"grad_norm": 0.27713659405708313,
"learning_rate": 1.3478129080114848e-05,
"loss": 0.1475,
"step": 204
},
{
"epoch": 15.045871559633028,
"grad_norm": 0.25950995087623596,
"learning_rate": 1.3022963385988151e-05,
"loss": 0.141,
"step": 205
},
{
"epoch": 15.119266055045872,
"grad_norm": 0.2496069371700287,
"learning_rate": 1.257446259144494e-05,
"loss": 0.1344,
"step": 206
},
{
"epoch": 15.192660550458715,
"grad_norm": 0.24646779894828796,
"learning_rate": 1.2132707536191008e-05,
"loss": 0.1505,
"step": 207
},
{
"epoch": 15.26605504587156,
"grad_norm": 0.23226536810398102,
"learning_rate": 1.1697777844051105e-05,
"loss": 0.1347,
"step": 208
},
{
"epoch": 15.339449541284404,
"grad_norm": 0.25484660267829895,
"learning_rate": 1.1269751908617277e-05,
"loss": 0.1399,
"step": 209
},
{
"epoch": 15.412844036697248,
"grad_norm": 0.25519728660583496,
"learning_rate": 1.0848706879118892e-05,
"loss": 0.141,
"step": 210
},
{
"epoch": 15.486238532110091,
"grad_norm": 0.2521832585334778,
"learning_rate": 1.0434718646516917e-05,
"loss": 0.1387,
"step": 211
},
{
"epoch": 15.559633027522935,
"grad_norm": 0.2603716552257538,
"learning_rate": 1.0027861829824952e-05,
"loss": 0.1401,
"step": 212
},
{
"epoch": 15.63302752293578,
"grad_norm": 0.2601398527622223,
"learning_rate": 9.628209762659657e-06,
"loss": 0.1288,
"step": 213
},
{
"epoch": 15.706422018348624,
"grad_norm": 0.2938661575317383,
"learning_rate": 9.235834480022787e-06,
"loss": 0.1454,
"step": 214
},
{
"epoch": 15.779816513761467,
"grad_norm": 0.2794564962387085,
"learning_rate": 8.850806705317183e-06,
"loss": 0.1555,
"step": 215
},
{
"epoch": 15.853211009174313,
"grad_norm": 0.2902175486087799,
"learning_rate": 8.473195837599418e-06,
"loss": 0.1436,
"step": 216
},
{
"epoch": 15.926605504587156,
"grad_norm": 0.3273734748363495,
"learning_rate": 8.103069939070945e-06,
"loss": 0.1355,
"step": 217
},
{
"epoch": 16.0,
"grad_norm": 0.29105839133262634,
"learning_rate": 7.740495722810271e-06,
"loss": 0.1455,
"step": 218
},
{
"epoch": 16.073394495412845,
"grad_norm": 0.3094259798526764,
"learning_rate": 7.385538540748327e-06,
"loss": 0.1421,
"step": 219
},
{
"epoch": 16.146788990825687,
"grad_norm": 0.2693122625350952,
"learning_rate": 7.038262371889159e-06,
"loss": 0.1426,
"step": 220
},
{
"epoch": 16.220183486238533,
"grad_norm": 0.24507200717926025,
"learning_rate": 6.698729810778065e-06,
"loss": 0.1419,
"step": 221
},
{
"epoch": 16.293577981651374,
"grad_norm": 0.256996750831604,
"learning_rate": 6.367002056219284e-06,
"loss": 0.1423,
"step": 222
},
{
"epoch": 16.36697247706422,
"grad_norm": 0.23636184632778168,
"learning_rate": 6.043138900245277e-06,
"loss": 0.134,
"step": 223
},
{
"epoch": 16.440366972477065,
"grad_norm": 0.2329455018043518,
"learning_rate": 5.727198717339511e-06,
"loss": 0.1313,
"step": 224
},
{
"epoch": 16.513761467889907,
"grad_norm": 0.26152482628822327,
"learning_rate": 5.41923845391486e-06,
"loss": 0.1371,
"step": 225
},
{
"epoch": 16.587155963302752,
"grad_norm": 0.24304719269275665,
"learning_rate": 5.1193136180493095e-06,
"loss": 0.1269,
"step": 226
},
{
"epoch": 16.660550458715598,
"grad_norm": 0.2667653262615204,
"learning_rate": 4.827478269480895e-06,
"loss": 0.1362,
"step": 227
},
{
"epoch": 16.73394495412844,
"grad_norm": 0.2727386951446533,
"learning_rate": 4.54378500986381e-06,
"loss": 0.1328,
"step": 228
},
{
"epoch": 16.807339449541285,
"grad_norm": 0.28039076924324036,
"learning_rate": 4.268284973287273e-06,
"loss": 0.14,
"step": 229
},
{
"epoch": 16.88073394495413,
"grad_norm": 0.2691219747066498,
"learning_rate": 4.001027817058789e-06,
"loss": 0.1424,
"step": 230
},
{
"epoch": 16.954128440366972,
"grad_norm": 0.2565619647502899,
"learning_rate": 3.7420617127538248e-06,
"loss": 0.1387,
"step": 231
},
{
"epoch": 17.027522935779817,
"grad_norm": 0.2617204189300537,
"learning_rate": 3.4914333375330898e-06,
"loss": 0.1355,
"step": 232
},
{
"epoch": 17.10091743119266,
"grad_norm": 0.24316667020320892,
"learning_rate": 3.249187865729264e-06,
"loss": 0.1365,
"step": 233
},
{
"epoch": 17.174311926605505,
"grad_norm": 0.26508602499961853,
"learning_rate": 3.0153689607045845e-06,
"loss": 0.1346,
"step": 234
},
{
"epoch": 17.24770642201835,
"grad_norm": 0.27173709869384766,
"learning_rate": 2.790018766980773e-06,
"loss": 0.1359,
"step": 235
},
{
"epoch": 17.321100917431192,
"grad_norm": 0.243478924036026,
"learning_rate": 2.573177902642726e-06,
"loss": 0.1431,
"step": 236
},
{
"epoch": 17.394495412844037,
"grad_norm": 0.2515186369419098,
"learning_rate": 2.3648854520173237e-06,
"loss": 0.142,
"step": 237
},
{
"epoch": 17.46788990825688,
"grad_norm": 0.2602452039718628,
"learning_rate": 2.1651789586287442e-06,
"loss": 0.1402,
"step": 238
},
{
"epoch": 17.541284403669724,
"grad_norm": 0.25888198614120483,
"learning_rate": 1.974094418431388e-06,
"loss": 0.1312,
"step": 239
},
{
"epoch": 17.61467889908257,
"grad_norm": 0.263540118932724,
"learning_rate": 1.7916662733218847e-06,
"loss": 0.1424,
"step": 240
},
{
"epoch": 17.68807339449541,
"grad_norm": 0.24854296445846558,
"learning_rate": 1.6179274049310966e-06,
"loss": 0.1341,
"step": 241
},
{
"epoch": 17.761467889908257,
"grad_norm": 0.24493998289108276,
"learning_rate": 1.4529091286973995e-06,
"loss": 0.1282,
"step": 242
},
{
"epoch": 17.834862385321102,
"grad_norm": 0.25934794545173645,
"learning_rate": 1.2966411882222696e-06,
"loss": 0.1438,
"step": 243
},
{
"epoch": 17.908256880733944,
"grad_norm": 0.2634919285774231,
"learning_rate": 1.1491517499091498e-06,
"loss": 0.1357,
"step": 244
},
{
"epoch": 17.98165137614679,
"grad_norm": 0.25140729546546936,
"learning_rate": 1.0104673978866164e-06,
"loss": 0.1269,
"step": 245
},
{
"epoch": 18.05504587155963,
"grad_norm": 0.26600712537765503,
"learning_rate": 8.806131292167618e-07,
"loss": 0.1343,
"step": 246
},
{
"epoch": 18.128440366972477,
"grad_norm": 0.2575834095478058,
"learning_rate": 7.596123493895991e-07,
"loss": 0.1394,
"step": 247
},
{
"epoch": 18.201834862385322,
"grad_norm": 0.2605387270450592,
"learning_rate": 6.474868681043578e-07,
"loss": 0.1442,
"step": 248
},
{
"epoch": 18.275229357798164,
"grad_norm": 0.26221585273742676,
"learning_rate": 5.442568953384186e-07,
"loss": 0.1407,
"step": 249
},
{
"epoch": 18.34862385321101,
"grad_norm": 0.24352163076400757,
"learning_rate": 4.4994103770457653e-07,
"loss": 0.1383,
"step": 250
},
{
"epoch": 18.422018348623855,
"grad_norm": 0.23631100356578827,
"learning_rate": 3.6455629509730136e-07,
"loss": 0.1282,
"step": 251
},
{
"epoch": 18.495412844036696,
"grad_norm": 0.24839530885219574,
"learning_rate": 2.8811805762860576e-07,
"loss": 0.1297,
"step": 252
},
{
"epoch": 18.568807339449542,
"grad_norm": 0.25721636414527893,
"learning_rate": 2.206401028540639e-07,
"loss": 0.1469,
"step": 253
},
{
"epoch": 18.642201834862384,
"grad_norm": 0.24413825571537018,
"learning_rate": 1.6213459328950352e-07,
"loss": 0.126,
"step": 254
},
{
"epoch": 18.71559633027523,
"grad_norm": 0.24766796827316284,
"learning_rate": 1.1261207421874309e-07,
"loss": 0.1397,
"step": 255
},
{
"epoch": 18.788990825688074,
"grad_norm": 0.24844591319561005,
"learning_rate": 7.208147179291192e-08,
"loss": 0.1355,
"step": 256
},
{
"epoch": 18.862385321100916,
"grad_norm": 0.2460290938615799,
"learning_rate": 4.055009142152067e-08,
"loss": 0.13,
"step": 257
},
{
"epoch": 18.93577981651376,
"grad_norm": 0.24734196066856384,
"learning_rate": 1.802361645573125e-08,
"loss": 0.1318,
"step": 258
},
{
"epoch": 19.009174311926607,
"grad_norm": 0.27149155735969543,
"learning_rate": 4.506107163948503e-09,
"loss": 0.1356,
"step": 259
},
{
"epoch": 19.08256880733945,
"grad_norm": 0.24715793132781982,
"learning_rate": 0.0,
"loss": 0.1384,
"step": 260
}
],
"logging_steps": 1.0,
"max_steps": 260,
"num_input_tokens_seen": 0,
"num_train_epochs": 20,
"save_steps": 130,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 4.241583344933929e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}