Phi-3-mini-4k-instruct-sa-v0.1 / trainer_state.json
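Below is the raw trainer state. Each log_history entry records the epoch, gradient norm, learning rate, and training loss at a given optimizer step. As a minimal sketch (not part of the original file), the entries could be loaded and the loss curve plotted roughly like this, assuming the file is saved locally as trainer_state.json and matplotlib is available:

import json
import matplotlib.pyplot as plt

# Load the trainer state dumped by the Hugging Face Trainer.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (skip eval-only records, if any).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("Loss from trainer_state.json log_history")
plt.show()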
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.9976558837318334,
"eval_steps": 500,
"global_step": 2665,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.009376465072667605,
"grad_norm": 0.20790895819664001,
"learning_rate": 4.999956573574533e-05,
"loss": 0.6093,
"step": 5
},
{
"epoch": 0.01875293014533521,
"grad_norm": 0.22529356181621552,
"learning_rate": 4.999826295806815e-05,
"loss": 0.5708,
"step": 10
},
{
"epoch": 0.02812939521800281,
"grad_norm": 0.19532211124897003,
"learning_rate": 4.999609171222846e-05,
"loss": 0.5403,
"step": 15
},
{
"epoch": 0.03750586029067042,
"grad_norm": 0.17083343863487244,
"learning_rate": 4.99930520736578e-05,
"loss": 0.559,
"step": 20
},
{
"epoch": 0.04688232536333802,
"grad_norm": 0.14475470781326294,
"learning_rate": 4.998914414795668e-05,
"loss": 0.5298,
"step": 25
},
{
"epoch": 0.05625879043600562,
"grad_norm": 0.1513879895210266,
"learning_rate": 4.99843680708909e-05,
"loss": 0.5082,
"step": 30
},
{
"epoch": 0.06563525550867323,
"grad_norm": 0.2249026745557785,
"learning_rate": 4.997872400838682e-05,
"loss": 0.5165,
"step": 35
},
{
"epoch": 0.07501172058134084,
"grad_norm": 0.16140912473201752,
"learning_rate": 4.997221215652562e-05,
"loss": 0.5224,
"step": 40
},
{
"epoch": 0.08438818565400844,
"grad_norm": 0.14945466816425323,
"learning_rate": 4.9964832741536444e-05,
"loss": 0.5322,
"step": 45
},
{
"epoch": 0.09376465072667604,
"grad_norm": 0.12949012219905853,
"learning_rate": 4.9956586019788584e-05,
"loss": 0.4886,
"step": 50
},
{
"epoch": 0.10314111579934365,
"grad_norm": 0.1388949751853943,
"learning_rate": 4.9947472277782584e-05,
"loss": 0.487,
"step": 55
},
{
"epoch": 0.11251758087201125,
"grad_norm": 0.1725090742111206,
"learning_rate": 4.993749183214021e-05,
"loss": 0.4749,
"step": 60
},
{
"epoch": 0.12189404594467886,
"grad_norm": 0.1453619748353958,
"learning_rate": 4.992664502959351e-05,
"loss": 0.4769,
"step": 65
},
{
"epoch": 0.13127051101734646,
"grad_norm": 0.1829695701599121,
"learning_rate": 4.991493224697281e-05,
"loss": 0.4587,
"step": 70
},
{
"epoch": 0.14064697609001406,
"grad_norm": 0.20768924057483673,
"learning_rate": 4.990235389119352e-05,
"loss": 0.4858,
"step": 75
},
{
"epoch": 0.15002344116268168,
"grad_norm": 0.18472038209438324,
"learning_rate": 4.9888910399242065e-05,
"loss": 0.4485,
"step": 80
},
{
"epoch": 0.15939990623534928,
"grad_norm": 0.20636047422885895,
"learning_rate": 4.987460223816067e-05,
"loss": 0.4629,
"step": 85
},
{
"epoch": 0.16877637130801687,
"grad_norm": 0.22416724264621735,
"learning_rate": 4.985942990503119e-05,
"loss": 0.4683,
"step": 90
},
{
"epoch": 0.1781528363806845,
"grad_norm": 0.17546997964382172,
"learning_rate": 4.984339392695777e-05,
"loss": 0.4625,
"step": 95
},
{
"epoch": 0.1875293014533521,
"grad_norm": 0.18117469549179077,
"learning_rate": 4.9826494861048576e-05,
"loss": 0.4346,
"step": 100
},
{
"epoch": 0.19690576652601968,
"grad_norm": 0.18969941139221191,
"learning_rate": 4.980873329439644e-05,
"loss": 0.4871,
"step": 105
},
{
"epoch": 0.2062822315986873,
"grad_norm": 0.1979440301656723,
"learning_rate": 4.979010984405842e-05,
"loss": 0.4564,
"step": 110
},
{
"epoch": 0.2156586966713549,
"grad_norm": 0.2334485948085785,
"learning_rate": 4.9770625157034436e-05,
"loss": 0.4801,
"step": 115
},
{
"epoch": 0.2250351617440225,
"grad_norm": 0.2206389456987381,
"learning_rate": 4.975027991024473e-05,
"loss": 0.4678,
"step": 120
},
{
"epoch": 0.23441162681669012,
"grad_norm": 0.19880324602127075,
"learning_rate": 4.972907481050637e-05,
"loss": 0.4794,
"step": 125
},
{
"epoch": 0.2437880918893577,
"grad_norm": 0.22166623175144196,
"learning_rate": 4.970701059450872e-05,
"loss": 0.4729,
"step": 130
},
{
"epoch": 0.25316455696202533,
"grad_norm": 0.19174839556217194,
"learning_rate": 4.968408802878778e-05,
"loss": 0.4754,
"step": 135
},
{
"epoch": 0.26254102203469293,
"grad_norm": 0.21328027546405792,
"learning_rate": 4.9660307909699645e-05,
"loss": 0.4374,
"step": 140
},
{
"epoch": 0.2719174871073605,
"grad_norm": 0.21817001700401306,
"learning_rate": 4.963567106339276e-05,
"loss": 0.4539,
"step": 145
},
{
"epoch": 0.2812939521800281,
"grad_norm": 0.22513534128665924,
"learning_rate": 4.961017834577927e-05,
"loss": 0.4713,
"step": 150
},
{
"epoch": 0.2906704172526957,
"grad_norm": 0.22900743782520294,
"learning_rate": 4.958383064250525e-05,
"loss": 0.4394,
"step": 155
},
{
"epoch": 0.30004688232536336,
"grad_norm": 0.2164304554462433,
"learning_rate": 4.955662886891995e-05,
"loss": 0.4462,
"step": 160
},
{
"epoch": 0.30942334739803096,
"grad_norm": 0.18812324106693268,
"learning_rate": 4.952857397004401e-05,
"loss": 0.475,
"step": 165
},
{
"epoch": 0.31879981247069855,
"grad_norm": 0.21450480818748474,
"learning_rate": 4.949966692053663e-05,
"loss": 0.462,
"step": 170
},
{
"epoch": 0.32817627754336615,
"grad_norm": 0.19328171014785767,
"learning_rate": 4.946990872466164e-05,
"loss": 0.4543,
"step": 175
},
{
"epoch": 0.33755274261603374,
"grad_norm": 0.2076808661222458,
"learning_rate": 4.943930041625272e-05,
"loss": 0.442,
"step": 180
},
{
"epoch": 0.34692920768870134,
"grad_norm": 0.207493394613266,
"learning_rate": 4.940784305867741e-05,
"loss": 0.4445,
"step": 185
},
{
"epoch": 0.356305672761369,
"grad_norm": 0.2177191525697708,
"learning_rate": 4.937553774480018e-05,
"loss": 0.4327,
"step": 190
},
{
"epoch": 0.3656821378340366,
"grad_norm": 0.22189687192440033,
"learning_rate": 4.934238559694448e-05,
"loss": 0.4804,
"step": 195
},
{
"epoch": 0.3750586029067042,
"grad_norm": 0.23572292923927307,
"learning_rate": 4.9308387766853725e-05,
"loss": 0.4701,
"step": 200
},
{
"epoch": 0.38443506797937177,
"grad_norm": 0.2647152841091156,
"learning_rate": 4.92735454356513e-05,
"loss": 0.4513,
"step": 205
},
{
"epoch": 0.39381153305203936,
"grad_norm": 0.2548702657222748,
"learning_rate": 4.9237859813799535e-05,
"loss": 0.4733,
"step": 210
},
{
"epoch": 0.40318799812470696,
"grad_norm": 0.22933240234851837,
"learning_rate": 4.9201332141057623e-05,
"loss": 0.4399,
"step": 215
},
{
"epoch": 0.4125644631973746,
"grad_norm": 0.24353943765163422,
"learning_rate": 4.9163963686438575e-05,
"loss": 0.4483,
"step": 220
},
{
"epoch": 0.4219409282700422,
"grad_norm": 0.23032227158546448,
"learning_rate": 4.912575574816511e-05,
"loss": 0.4415,
"step": 225
},
{
"epoch": 0.4313173933427098,
"grad_norm": 0.27580568194389343,
"learning_rate": 4.908670965362457e-05,
"loss": 0.4167,
"step": 230
},
{
"epoch": 0.4406938584153774,
"grad_norm": 0.20118704438209534,
"learning_rate": 4.9046826759322825e-05,
"loss": 0.4625,
"step": 235
},
{
"epoch": 0.450070323488045,
"grad_norm": 0.27328649163246155,
"learning_rate": 4.9006108450837095e-05,
"loss": 0.4473,
"step": 240
},
{
"epoch": 0.45944678856071264,
"grad_norm": 0.24165798723697662,
"learning_rate": 4.8964556142767845e-05,
"loss": 0.4512,
"step": 245
},
{
"epoch": 0.46882325363338023,
"grad_norm": 0.23875291645526886,
"learning_rate": 4.892217127868965e-05,
"loss": 0.4469,
"step": 250
},
{
"epoch": 0.4781997187060478,
"grad_norm": 0.2750314176082611,
"learning_rate": 4.8878955331101026e-05,
"loss": 0.4655,
"step": 255
},
{
"epoch": 0.4875761837787154,
"grad_norm": 0.23661385476589203,
"learning_rate": 4.8834909801373264e-05,
"loss": 0.4181,
"step": 260
},
{
"epoch": 0.496952648851383,
"grad_norm": 0.2660558223724365,
"learning_rate": 4.879003621969831e-05,
"loss": 0.4295,
"step": 265
},
{
"epoch": 0.5063291139240507,
"grad_norm": 0.26039206981658936,
"learning_rate": 4.874433614503554e-05,
"loss": 0.4398,
"step": 270
},
{
"epoch": 0.5157055789967182,
"grad_norm": 0.23488390445709229,
"learning_rate": 4.869781116505768e-05,
"loss": 0.4303,
"step": 275
},
{
"epoch": 0.5250820440693859,
"grad_norm": 0.25998032093048096,
"learning_rate": 4.8650462896095597e-05,
"loss": 0.4462,
"step": 280
},
{
"epoch": 0.5344585091420534,
"grad_norm": 0.2225557565689087,
"learning_rate": 4.860229298308213e-05,
"loss": 0.4321,
"step": 285
},
{
"epoch": 0.543834974214721,
"grad_norm": 0.2588254511356354,
"learning_rate": 4.8553303099495e-05,
"loss": 0.4253,
"step": 290
},
{
"epoch": 0.5532114392873887,
"grad_norm": 0.23887360095977783,
"learning_rate": 4.8503494947298634e-05,
"loss": 0.4438,
"step": 295
},
{
"epoch": 0.5625879043600562,
"grad_norm": 0.24014081060886383,
"learning_rate": 4.845287025688503e-05,
"loss": 0.4449,
"step": 300
},
{
"epoch": 0.5719643694327239,
"grad_norm": 0.23950184881687164,
"learning_rate": 4.8401430787013666e-05,
"loss": 0.4372,
"step": 305
},
{
"epoch": 0.5813408345053914,
"grad_norm": 0.23509977757930756,
"learning_rate": 4.8349178324750387e-05,
"loss": 0.4308,
"step": 310
},
{
"epoch": 0.5907172995780591,
"grad_norm": 0.25797203183174133,
"learning_rate": 4.8296114685405324e-05,
"loss": 0.429,
"step": 315
},
{
"epoch": 0.6000937646507267,
"grad_norm": 0.4092111885547638,
"learning_rate": 4.824224171246981e-05,
"loss": 0.4755,
"step": 320
},
{
"epoch": 0.6094702297233943,
"grad_norm": 0.2742483913898468,
"learning_rate": 4.8187561277552374e-05,
"loss": 0.4391,
"step": 325
},
{
"epoch": 0.6188466947960619,
"grad_norm": 0.26388296484947205,
"learning_rate": 4.813207528031366e-05,
"loss": 0.4184,
"step": 330
},
{
"epoch": 0.6282231598687295,
"grad_norm": 0.2813189923763275,
"learning_rate": 4.807578564840051e-05,
"loss": 0.4677,
"step": 335
},
{
"epoch": 0.6375996249413971,
"grad_norm": 0.24881592392921448,
"learning_rate": 4.801869433737891e-05,
"loss": 0.4366,
"step": 340
},
{
"epoch": 0.6469760900140648,
"grad_norm": 0.24432657659053802,
"learning_rate": 4.796080333066613e-05,
"loss": 0.4536,
"step": 345
},
{
"epoch": 0.6563525550867323,
"grad_norm": 0.26776596903800964,
"learning_rate": 4.790211463946174e-05,
"loss": 0.4678,
"step": 350
},
{
"epoch": 0.6657290201593999,
"grad_norm": 0.2862718403339386,
"learning_rate": 4.784263030267781e-05,
"loss": 0.4178,
"step": 355
},
{
"epoch": 0.6751054852320675,
"grad_norm": 0.25835174322128296,
"learning_rate": 4.7782352386868035e-05,
"loss": 0.457,
"step": 360
},
{
"epoch": 0.6844819503047351,
"grad_norm": 0.245767742395401,
"learning_rate": 4.7721282986155945e-05,
"loss": 0.4098,
"step": 365
},
{
"epoch": 0.6938584153774027,
"grad_norm": 0.31210237741470337,
"learning_rate": 4.7659424222162165e-05,
"loss": 0.4263,
"step": 370
},
{
"epoch": 0.7032348804500703,
"grad_norm": 0.27777954936027527,
"learning_rate": 4.7596778243930694e-05,
"loss": 0.4433,
"step": 375
},
{
"epoch": 0.712611345522738,
"grad_norm": 0.2539767324924469,
"learning_rate": 4.7533347227854265e-05,
"loss": 0.4369,
"step": 380
},
{
"epoch": 0.7219878105954055,
"grad_norm": 0.3035132586956024,
"learning_rate": 4.7469133377598695e-05,
"loss": 0.4206,
"step": 385
},
{
"epoch": 0.7313642756680732,
"grad_norm": 0.3070785105228424,
"learning_rate": 4.740413892402639e-05,
"loss": 0.4311,
"step": 390
},
{
"epoch": 0.7407407407407407,
"grad_norm": 0.29886579513549805,
"learning_rate": 4.7338366125118775e-05,
"loss": 0.4471,
"step": 395
},
{
"epoch": 0.7501172058134083,
"grad_norm": 0.4168696105480194,
"learning_rate": 4.727181726589789e-05,
"loss": 0.4567,
"step": 400
},
{
"epoch": 0.759493670886076,
"grad_norm": 0.4680975377559662,
"learning_rate": 4.7204494658346996e-05,
"loss": 0.4307,
"step": 405
},
{
"epoch": 0.7688701359587435,
"grad_norm": 0.2854766249656677,
"learning_rate": 4.713640064133025e-05,
"loss": 0.4303,
"step": 410
},
{
"epoch": 0.7782466010314112,
"grad_norm": 0.29096436500549316,
"learning_rate": 4.706753758051145e-05,
"loss": 0.4266,
"step": 415
},
{
"epoch": 0.7876230661040787,
"grad_norm": 0.2967214286327362,
"learning_rate": 4.699790786827188e-05,
"loss": 0.4401,
"step": 420
},
{
"epoch": 0.7969995311767464,
"grad_norm": 0.30379167199134827,
"learning_rate": 4.6927513923627124e-05,
"loss": 0.4343,
"step": 425
},
{
"epoch": 0.8063759962494139,
"grad_norm": 0.28345710039138794,
"learning_rate": 4.68563581921431e-05,
"loss": 0.4205,
"step": 430
},
{
"epoch": 0.8157524613220816,
"grad_norm": 0.3067210912704468,
"learning_rate": 4.6784443145851074e-05,
"loss": 0.3997,
"step": 435
},
{
"epoch": 0.8251289263947492,
"grad_norm": 0.2822760343551636,
"learning_rate": 4.671177128316176e-05,
"loss": 0.4103,
"step": 440
},
{
"epoch": 0.8345053914674168,
"grad_norm": 0.3110799491405487,
"learning_rate": 4.663834512877853e-05,
"loss": 0.4417,
"step": 445
},
{
"epoch": 0.8438818565400844,
"grad_norm": 0.3088453710079193,
"learning_rate": 4.6564167233609736e-05,
"loss": 0.4285,
"step": 450
},
{
"epoch": 0.853258321612752,
"grad_norm": 0.2757161259651184,
"learning_rate": 4.648924017468003e-05,
"loss": 0.4207,
"step": 455
},
{
"epoch": 0.8626347866854196,
"grad_norm": 0.3142268657684326,
"learning_rate": 4.6413566555040896e-05,
"loss": 0.4621,
"step": 460
},
{
"epoch": 0.8720112517580872,
"grad_norm": 0.2766186594963074,
"learning_rate": 4.633714900368018e-05,
"loss": 0.4201,
"step": 465
},
{
"epoch": 0.8813877168307548,
"grad_norm": 0.2814094126224518,
"learning_rate": 4.625999017543075e-05,
"loss": 0.4435,
"step": 470
},
{
"epoch": 0.8907641819034224,
"grad_norm": 0.29491695761680603,
"learning_rate": 4.618209275087829e-05,
"loss": 0.4527,
"step": 475
},
{
"epoch": 0.90014064697609,
"grad_norm": 0.30693337321281433,
"learning_rate": 4.610345943626817e-05,
"loss": 0.4268,
"step": 480
},
{
"epoch": 0.9095171120487576,
"grad_norm": 0.29456719756126404,
"learning_rate": 4.602409296341141e-05,
"loss": 0.4094,
"step": 485
},
{
"epoch": 0.9188935771214253,
"grad_norm": 0.2991769015789032,
"learning_rate": 4.5943996089589775e-05,
"loss": 0.4333,
"step": 490
},
{
"epoch": 0.9282700421940928,
"grad_norm": 0.3011597692966461,
"learning_rate": 4.586317159746001e-05,
"loss": 0.4821,
"step": 495
},
{
"epoch": 0.9376465072667605,
"grad_norm": 0.33786553144454956,
"learning_rate": 4.5781622294957136e-05,
"loss": 0.4228,
"step": 500
},
{
"epoch": 0.947022972339428,
"grad_norm": 0.28369849920272827,
"learning_rate": 4.569935101519692e-05,
"loss": 0.4333,
"step": 505
},
{
"epoch": 0.9563994374120957,
"grad_norm": 0.27438902854919434,
"learning_rate": 4.561636061637745e-05,
"loss": 0.4698,
"step": 510
},
{
"epoch": 0.9657759024847632,
"grad_norm": 0.26311373710632324,
"learning_rate": 4.553265398167981e-05,
"loss": 0.4211,
"step": 515
},
{
"epoch": 0.9751523675574308,
"grad_norm": 0.3168148994445801,
"learning_rate": 4.5448234019167945e-05,
"loss": 0.4183,
"step": 520
},
{
"epoch": 0.9845288326300985,
"grad_norm": 0.3082910478115082,
"learning_rate": 4.536310366168763e-05,
"loss": 0.4251,
"step": 525
},
{
"epoch": 0.993905297702766,
"grad_norm": 0.30115172266960144,
"learning_rate": 4.5277265866764565e-05,
"loss": 0.4292,
"step": 530
},
{
"epoch": 1.0032817627754336,
"grad_norm": 0.3527440130710602,
"learning_rate": 4.519072361650163e-05,
"loss": 0.4051,
"step": 535
},
{
"epoch": 1.0126582278481013,
"grad_norm": 0.3075549006462097,
"learning_rate": 4.5103479917475286e-05,
"loss": 0.4027,
"step": 540
},
{
"epoch": 1.0220346929207689,
"grad_norm": 0.3359437584877014,
"learning_rate": 4.501553780063113e-05,
"loss": 0.4008,
"step": 545
},
{
"epoch": 1.0314111579934364,
"grad_norm": 0.29366499185562134,
"learning_rate": 4.4926900321178595e-05,
"loss": 0.4421,
"step": 550
},
{
"epoch": 1.0407876230661042,
"grad_norm": 0.3159714937210083,
"learning_rate": 4.483757055848479e-05,
"loss": 0.4124,
"step": 555
},
{
"epoch": 1.0501640881387717,
"grad_norm": 0.30557411909103394,
"learning_rate": 4.4747551615967534e-05,
"loss": 0.4023,
"step": 560
},
{
"epoch": 1.0595405532114393,
"grad_norm": 0.3260650336742401,
"learning_rate": 4.4656846620987557e-05,
"loss": 0.4376,
"step": 565
},
{
"epoch": 1.0689170182841068,
"grad_norm": 0.3325200378894806,
"learning_rate": 4.4565458724739825e-05,
"loss": 0.4421,
"step": 570
},
{
"epoch": 1.0782934833567746,
"grad_norm": 0.3431427776813507,
"learning_rate": 4.447339110214405e-05,
"loss": 0.4119,
"step": 575
},
{
"epoch": 1.087669948429442,
"grad_norm": 0.3403518497943878,
"learning_rate": 4.438064695173446e-05,
"loss": 0.4276,
"step": 580
},
{
"epoch": 1.0970464135021096,
"grad_norm": 0.35619255900382996,
"learning_rate": 4.428722949554857e-05,
"loss": 0.4258,
"step": 585
},
{
"epoch": 1.1064228785747774,
"grad_norm": 0.2965317964553833,
"learning_rate": 4.419314197901537e-05,
"loss": 0.41,
"step": 590
},
{
"epoch": 1.115799343647445,
"grad_norm": 0.31006523966789246,
"learning_rate": 4.4098387670842466e-05,
"loss": 0.4292,
"step": 595
},
{
"epoch": 1.1251758087201125,
"grad_norm": 0.3238849639892578,
"learning_rate": 4.400296986290258e-05,
"loss": 0.4079,
"step": 600
},
{
"epoch": 1.1345522737927802,
"grad_norm": 0.33775433897972107,
"learning_rate": 4.390689187011917e-05,
"loss": 0.4123,
"step": 605
},
{
"epoch": 1.1439287388654478,
"grad_norm": 0.32464325428009033,
"learning_rate": 4.3810157030351276e-05,
"loss": 0.3983,
"step": 610
},
{
"epoch": 1.1533052039381153,
"grad_norm": 0.36141082644462585,
"learning_rate": 4.371276870427753e-05,
"loss": 0.3923,
"step": 615
},
{
"epoch": 1.1626816690107828,
"grad_norm": 0.3391363322734833,
"learning_rate": 4.3614730275279457e-05,
"loss": 0.402,
"step": 620
},
{
"epoch": 1.1720581340834506,
"grad_norm": 0.3580019772052765,
"learning_rate": 4.351604514932387e-05,
"loss": 0.4138,
"step": 625
},
{
"epoch": 1.1814345991561181,
"grad_norm": 0.3803277611732483,
"learning_rate": 4.341671675484459e-05,
"loss": 0.4254,
"step": 630
},
{
"epoch": 1.1908110642287857,
"grad_norm": 0.31499192118644714,
"learning_rate": 4.331674854262331e-05,
"loss": 0.4174,
"step": 635
},
{
"epoch": 1.2001875293014534,
"grad_norm": 0.34143874049186707,
"learning_rate": 4.321614398566972e-05,
"loss": 0.3954,
"step": 640
},
{
"epoch": 1.209563994374121,
"grad_norm": 0.328500360250473,
"learning_rate": 4.3114906579100853e-05,
"loss": 0.3944,
"step": 645
},
{
"epoch": 1.2189404594467885,
"grad_norm": 0.34014376997947693,
"learning_rate": 4.301303984001967e-05,
"loss": 0.4265,
"step": 650
},
{
"epoch": 1.228316924519456,
"grad_norm": 0.33687010407447815,
"learning_rate": 4.291054730739286e-05,
"loss": 0.4255,
"step": 655
},
{
"epoch": 1.2376933895921238,
"grad_norm": 0.3507842719554901,
"learning_rate": 4.2807432541927865e-05,
"loss": 0.424,
"step": 660
},
{
"epoch": 1.2470698546647914,
"grad_norm": 0.35059022903442383,
"learning_rate": 4.2703699125949245e-05,
"loss": 0.4227,
"step": 665
},
{
"epoch": 1.256446319737459,
"grad_norm": 0.381517618894577,
"learning_rate": 4.259935066327415e-05,
"loss": 0.4674,
"step": 670
},
{
"epoch": 1.2658227848101267,
"grad_norm": 0.309592604637146,
"learning_rate": 4.2494390779087187e-05,
"loss": 0.4365,
"step": 675
},
{
"epoch": 1.2751992498827942,
"grad_norm": 0.33022499084472656,
"learning_rate": 4.238882311981441e-05,
"loss": 0.3918,
"step": 680
},
{
"epoch": 1.2845757149554617,
"grad_norm": 0.37556394934654236,
"learning_rate": 4.228265135299669e-05,
"loss": 0.4257,
"step": 685
},
{
"epoch": 1.2939521800281293,
"grad_norm": 0.32622891664505005,
"learning_rate": 4.2175879167162304e-05,
"loss": 0.4442,
"step": 690
},
{
"epoch": 1.303328645100797,
"grad_norm": 0.3617080748081207,
"learning_rate": 4.206851027169871e-05,
"loss": 0.4292,
"step": 695
},
{
"epoch": 1.3127051101734646,
"grad_norm": 0.3132336735725403,
"learning_rate": 4.196054839672382e-05,
"loss": 0.4135,
"step": 700
},
{
"epoch": 1.3220815752461323,
"grad_norm": 0.315346360206604,
"learning_rate": 4.1851997292956255e-05,
"loss": 0.4163,
"step": 705
},
{
"epoch": 1.3314580403187999,
"grad_norm": 0.3571698069572449,
"learning_rate": 4.174286073158516e-05,
"loss": 0.4144,
"step": 710
},
{
"epoch": 1.3408345053914674,
"grad_norm": 0.35016143321990967,
"learning_rate": 4.163314250413913e-05,
"loss": 0.3898,
"step": 715
},
{
"epoch": 1.350210970464135,
"grad_norm": 0.34223607182502747,
"learning_rate": 4.152284642235452e-05,
"loss": 0.422,
"step": 720
},
{
"epoch": 1.3595874355368025,
"grad_norm": 0.3561420142650604,
"learning_rate": 4.141197631804298e-05,
"loss": 0.4279,
"step": 725
},
{
"epoch": 1.3689639006094703,
"grad_norm": 0.3987065255641937,
"learning_rate": 4.1300536042958354e-05,
"loss": 0.4233,
"step": 730
},
{
"epoch": 1.3783403656821378,
"grad_norm": 0.34799569845199585,
"learning_rate": 4.118852946866291e-05,
"loss": 0.4214,
"step": 735
},
{
"epoch": 1.3877168307548056,
"grad_norm": 0.4023985266685486,
"learning_rate": 4.107596048639274e-05,
"loss": 0.394,
"step": 740
},
{
"epoch": 1.397093295827473,
"grad_norm": 0.3554348647594452,
"learning_rate": 4.0962833006922675e-05,
"loss": 0.4283,
"step": 745
},
{
"epoch": 1.4064697609001406,
"grad_norm": 0.33551228046417236,
"learning_rate": 4.0849150960430356e-05,
"loss": 0.4164,
"step": 750
},
{
"epoch": 1.4158462259728082,
"grad_norm": 0.3325096368789673,
"learning_rate": 4.0734918296359716e-05,
"loss": 0.4242,
"step": 755
},
{
"epoch": 1.4252226910454757,
"grad_norm": 0.32375457882881165,
"learning_rate": 4.0620138983283785e-05,
"loss": 0.4356,
"step": 760
},
{
"epoch": 1.4345991561181435,
"grad_norm": 0.3310219943523407,
"learning_rate": 4.050481700876677e-05,
"loss": 0.4283,
"step": 765
},
{
"epoch": 1.443975621190811,
"grad_norm": 0.3990609347820282,
"learning_rate": 4.038895637922559e-05,
"loss": 0.4069,
"step": 770
},
{
"epoch": 1.4533520862634788,
"grad_norm": 0.35942110419273376,
"learning_rate": 4.027256111979063e-05,
"loss": 0.4103,
"step": 775
},
{
"epoch": 1.4627285513361463,
"grad_norm": 0.3540840744972229,
"learning_rate": 4.015563527416595e-05,
"loss": 0.4243,
"step": 780
},
{
"epoch": 1.4721050164088139,
"grad_norm": 0.3438728153705597,
"learning_rate": 4.003818290448876e-05,
"loss": 0.4222,
"step": 785
},
{
"epoch": 1.4814814814814814,
"grad_norm": 0.3385811150074005,
"learning_rate": 3.992020809118832e-05,
"loss": 0.4597,
"step": 790
},
{
"epoch": 1.4908579465541492,
"grad_norm": 0.3348815441131592,
"learning_rate": 3.980171493284418e-05,
"loss": 0.4049,
"step": 795
},
{
"epoch": 1.5002344116268167,
"grad_norm": 0.3877304494380951,
"learning_rate": 3.9682707546043785e-05,
"loss": 0.4573,
"step": 800
},
{
"epoch": 1.5096108766994845,
"grad_norm": 0.372578889131546,
"learning_rate": 3.9563190065239474e-05,
"loss": 0.4064,
"step": 805
},
{
"epoch": 1.518987341772152,
"grad_norm": 0.4140545725822449,
"learning_rate": 3.9443166642604814e-05,
"loss": 0.4125,
"step": 810
},
{
"epoch": 1.5283638068448195,
"grad_norm": 0.30370739102363586,
"learning_rate": 3.932264144789038e-05,
"loss": 0.3997,
"step": 815
},
{
"epoch": 1.537740271917487,
"grad_norm": 0.3417491018772125,
"learning_rate": 3.920161866827889e-05,
"loss": 0.4374,
"step": 820
},
{
"epoch": 1.5471167369901546,
"grad_norm": 0.3708433210849762,
"learning_rate": 3.908010250823972e-05,
"loss": 0.4273,
"step": 825
},
{
"epoch": 1.5564932020628222,
"grad_norm": 0.3969629108905792,
"learning_rate": 3.895809718938283e-05,
"loss": 0.4255,
"step": 830
},
{
"epoch": 1.56586966713549,
"grad_norm": 0.344990998506546,
"learning_rate": 3.883560695031213e-05,
"loss": 0.3804,
"step": 835
},
{
"epoch": 1.5752461322081577,
"grad_norm": 0.35197708010673523,
"learning_rate": 3.871263604647822e-05,
"loss": 0.4172,
"step": 840
},
{
"epoch": 1.5846225972808252,
"grad_norm": 0.35443294048309326,
"learning_rate": 3.858918875003053e-05,
"loss": 0.4132,
"step": 845
},
{
"epoch": 1.5939990623534928,
"grad_norm": 0.3435305953025818,
"learning_rate": 3.846526934966891e-05,
"loss": 0.4295,
"step": 850
},
{
"epoch": 1.6033755274261603,
"grad_norm": 0.39062219858169556,
"learning_rate": 3.834088215049464e-05,
"loss": 0.4305,
"step": 855
},
{
"epoch": 1.6127519924988278,
"grad_norm": 0.37184613943099976,
"learning_rate": 3.821603147386088e-05,
"loss": 0.4114,
"step": 860
},
{
"epoch": 1.6221284575714956,
"grad_norm": 0.3640553057193756,
"learning_rate": 3.80907216572225e-05,
"loss": 0.3906,
"step": 865
},
{
"epoch": 1.6315049226441631,
"grad_norm": 0.3880080580711365,
"learning_rate": 3.796495705398544e-05,
"loss": 0.4302,
"step": 870
},
{
"epoch": 1.640881387716831,
"grad_norm": 0.3895851671695709,
"learning_rate": 3.783874203335542e-05,
"loss": 0.4044,
"step": 875
},
{
"epoch": 1.6502578527894984,
"grad_norm": 0.4187454283237457,
"learning_rate": 3.77120809801862e-05,
"loss": 0.404,
"step": 880
},
{
"epoch": 1.659634317862166,
"grad_norm": 0.32384535670280457,
"learning_rate": 3.758497829482721e-05,
"loss": 0.439,
"step": 885
},
{
"epoch": 1.6690107829348335,
"grad_norm": 0.38500747084617615,
"learning_rate": 3.7457438392970686e-05,
"loss": 0.3843,
"step": 890
},
{
"epoch": 1.678387248007501,
"grad_norm": 0.40994030237197876,
"learning_rate": 3.732946570549825e-05,
"loss": 0.4189,
"step": 895
},
{
"epoch": 1.6877637130801688,
"grad_norm": 0.34361371397972107,
"learning_rate": 3.720106467832701e-05,
"loss": 0.4532,
"step": 900
},
{
"epoch": 1.6971401781528364,
"grad_norm": 0.37997591495513916,
"learning_rate": 3.707223977225507e-05,
"loss": 0.4298,
"step": 905
},
{
"epoch": 1.7065166432255041,
"grad_norm": 0.3335699439048767,
"learning_rate": 3.694299546280657e-05,
"loss": 0.4442,
"step": 910
},
{
"epoch": 1.7158931082981717,
"grad_norm": 0.38952916860580444,
"learning_rate": 3.681333624007623e-05,
"loss": 0.4263,
"step": 915
},
{
"epoch": 1.7252695733708392,
"grad_norm": 0.36404407024383545,
"learning_rate": 3.6683266608573286e-05,
"loss": 0.4905,
"step": 920
},
{
"epoch": 1.7346460384435067,
"grad_norm": 0.3932304084300995,
"learning_rate": 3.6552791087065075e-05,
"loss": 0.3976,
"step": 925
},
{
"epoch": 1.7440225035161743,
"grad_norm": 0.3757387697696686,
"learning_rate": 3.642191420842e-05,
"loss": 0.4259,
"step": 930
},
{
"epoch": 1.753398968588842,
"grad_norm": 0.36103346943855286,
"learning_rate": 3.6290640519450074e-05,
"loss": 0.4184,
"step": 935
},
{
"epoch": 1.7627754336615096,
"grad_norm": 0.3944946229457855,
"learning_rate": 3.6158974580752954e-05,
"loss": 0.451,
"step": 940
},
{
"epoch": 1.7721518987341773,
"grad_norm": 0.4115646183490753,
"learning_rate": 3.60269209665535e-05,
"loss": 0.4075,
"step": 945
},
{
"epoch": 1.7815283638068449,
"grad_norm": 0.36925530433654785,
"learning_rate": 3.589448426454486e-05,
"loss": 0.432,
"step": 950
},
{
"epoch": 1.7909048288795124,
"grad_norm": 0.39060622453689575,
"learning_rate": 3.5761669075729084e-05,
"loss": 0.3979,
"step": 955
},
{
"epoch": 1.80028129395218,
"grad_norm": 0.3404456377029419,
"learning_rate": 3.562848001425729e-05,
"loss": 0.458,
"step": 960
},
{
"epoch": 1.8096577590248475,
"grad_norm": 0.3717631995677948,
"learning_rate": 3.549492170726937e-05,
"loss": 0.4111,
"step": 965
},
{
"epoch": 1.8190342240975153,
"grad_norm": 0.3680458068847656,
"learning_rate": 3.53609987947332e-05,
"loss": 0.4276,
"step": 970
},
{
"epoch": 1.8284106891701828,
"grad_norm": 0.3876858651638031,
"learning_rate": 3.5226715929283506e-05,
"loss": 0.3976,
"step": 975
},
{
"epoch": 1.8377871542428506,
"grad_norm": 0.3375720679759979,
"learning_rate": 3.509207777606013e-05,
"loss": 0.3993,
"step": 980
},
{
"epoch": 1.847163619315518,
"grad_norm": 0.3920380771160126,
"learning_rate": 3.49570890125461e-05,
"loss": 0.4019,
"step": 985
},
{
"epoch": 1.8565400843881856,
"grad_norm": 0.35098692774772644,
"learning_rate": 3.482175432840495e-05,
"loss": 0.3951,
"step": 990
},
{
"epoch": 1.8659165494608532,
"grad_norm": 0.3828470706939697,
"learning_rate": 3.468607842531797e-05,
"loss": 0.4119,
"step": 995
},
{
"epoch": 1.8752930145335207,
"grad_norm": 0.3853389620780945,
"learning_rate": 3.455006601682075e-05,
"loss": 0.3997,
"step": 1000
},
{
"epoch": 1.8846694796061885,
"grad_norm": 0.36217638850212097,
"learning_rate": 3.441372182813946e-05,
"loss": 0.3949,
"step": 1005
},
{
"epoch": 1.8940459446788562,
"grad_norm": 0.38590380549430847,
"learning_rate": 3.427705059602671e-05,
"loss": 0.4041,
"step": 1010
},
{
"epoch": 1.9034224097515238,
"grad_norm": 0.3818942606449127,
"learning_rate": 3.414005706859693e-05,
"loss": 0.4247,
"step": 1015
},
{
"epoch": 1.9127988748241913,
"grad_norm": 0.36795973777770996,
"learning_rate": 3.400274600516152e-05,
"loss": 0.406,
"step": 1020
},
{
"epoch": 1.9221753398968588,
"grad_norm": 0.37341007590293884,
"learning_rate": 3.386512217606339e-05,
"loss": 0.405,
"step": 1025
},
{
"epoch": 1.9315518049695264,
"grad_norm": 0.3815973699092865,
"learning_rate": 3.372719036251132e-05,
"loss": 0.4093,
"step": 1030
},
{
"epoch": 1.9409282700421941,
"grad_norm": 0.32503455877304077,
"learning_rate": 3.3588955356413795e-05,
"loss": 0.409,
"step": 1035
},
{
"epoch": 1.9503047351148617,
"grad_norm": 0.41935157775878906,
"learning_rate": 3.3450421960212566e-05,
"loss": 0.4052,
"step": 1040
},
{
"epoch": 1.9596812001875294,
"grad_norm": 0.3876263201236725,
"learning_rate": 3.3311594986715814e-05,
"loss": 0.4271,
"step": 1045
},
{
"epoch": 1.969057665260197,
"grad_norm": 0.37812694907188416,
"learning_rate": 3.317247925893089e-05,
"loss": 0.3977,
"step": 1050
},
{
"epoch": 1.9784341303328645,
"grad_norm": 0.39891424775123596,
"learning_rate": 3.3033079609896834e-05,
"loss": 0.3831,
"step": 1055
},
{
"epoch": 1.987810595405532,
"grad_norm": 0.3980333209037781,
"learning_rate": 3.289340088251642e-05,
"loss": 0.3874,
"step": 1060
},
{
"epoch": 1.9971870604781996,
"grad_norm": 0.3976622521877289,
"learning_rate": 3.275344792938791e-05,
"loss": 0.4092,
"step": 1065
},
{
"epoch": 2.006563525550867,
"grad_norm": 0.34287935495376587,
"learning_rate": 3.2613225612636525e-05,
"loss": 0.3795,
"step": 1070
},
{
"epoch": 2.015939990623535,
"grad_norm": 0.3522503674030304,
"learning_rate": 3.247273880374542e-05,
"loss": 0.3895,
"step": 1075
},
{
"epoch": 2.0253164556962027,
"grad_norm": 0.4062464237213135,
"learning_rate": 3.2331992383386566e-05,
"loss": 0.4279,
"step": 1080
},
{
"epoch": 2.03469292076887,
"grad_norm": 0.3659546375274658,
"learning_rate": 3.21909912412511e-05,
"loss": 0.4056,
"step": 1085
},
{
"epoch": 2.0440693858415377,
"grad_norm": 0.3941885232925415,
"learning_rate": 3.2049740275879493e-05,
"loss": 0.4142,
"step": 1090
},
{
"epoch": 2.0534458509142053,
"grad_norm": 0.43663620948791504,
"learning_rate": 3.190824439449137e-05,
"loss": 0.431,
"step": 1095
},
{
"epoch": 2.062822315986873,
"grad_norm": 0.40177416801452637,
"learning_rate": 3.176650851281499e-05,
"loss": 0.3952,
"step": 1100
},
{
"epoch": 2.0721987810595404,
"grad_norm": 0.3755819797515869,
"learning_rate": 3.162453755491655e-05,
"loss": 0.3717,
"step": 1105
},
{
"epoch": 2.0815752461322083,
"grad_norm": 0.3558565676212311,
"learning_rate": 3.1482336453028986e-05,
"loss": 0.392,
"step": 1110
},
{
"epoch": 2.090951711204876,
"grad_norm": 0.3933524191379547,
"learning_rate": 3.133991014738076e-05,
"loss": 0.4004,
"step": 1115
},
{
"epoch": 2.1003281762775434,
"grad_norm": 0.3724795877933502,
"learning_rate": 3.1197263586024155e-05,
"loss": 0.4095,
"step": 1120
},
{
"epoch": 2.109704641350211,
"grad_norm": 0.4012450873851776,
"learning_rate": 3.105440172466337e-05,
"loss": 0.4169,
"step": 1125
},
{
"epoch": 2.1190811064228785,
"grad_norm": 0.43174034357070923,
"learning_rate": 3.09113295264824e-05,
"loss": 0.4105,
"step": 1130
},
{
"epoch": 2.128457571495546,
"grad_norm": 0.37383630871772766,
"learning_rate": 3.076805196197255e-05,
"loss": 0.4085,
"step": 1135
},
{
"epoch": 2.1378340365682136,
"grad_norm": 0.40011560916900635,
"learning_rate": 3.0624574008759805e-05,
"loss": 0.4704,
"step": 1140
},
{
"epoch": 2.1472105016408816,
"grad_norm": 0.38523784279823303,
"learning_rate": 3.0480900651431876e-05,
"loss": 0.3913,
"step": 1145
},
{
"epoch": 2.156586966713549,
"grad_norm": 0.39474254846572876,
"learning_rate": 3.0337036881365045e-05,
"loss": 0.3904,
"step": 1150
},
{
"epoch": 2.1659634317862166,
"grad_norm": 0.3798459768295288,
"learning_rate": 3.0192987696550746e-05,
"loss": 0.3899,
"step": 1155
},
{
"epoch": 2.175339896858884,
"grad_norm": 0.42982256412506104,
"learning_rate": 3.0048758101421914e-05,
"loss": 0.4025,
"step": 1160
},
{
"epoch": 2.1847163619315517,
"grad_norm": 0.40922918915748596,
"learning_rate": 2.9904353106679178e-05,
"loss": 0.4038,
"step": 1165
},
{
"epoch": 2.1940928270042193,
"grad_norm": 0.3726600408554077,
"learning_rate": 2.975977772911671e-05,
"loss": 0.4396,
"step": 1170
},
{
"epoch": 2.2034692920768872,
"grad_norm": 0.4845339357852936,
"learning_rate": 2.9615036991448015e-05,
"loss": 0.4164,
"step": 1175
},
{
"epoch": 2.212845757149555,
"grad_norm": 0.4207274317741394,
"learning_rate": 2.947013592213137e-05,
"loss": 0.4128,
"step": 1180
},
{
"epoch": 2.2222222222222223,
"grad_norm": 0.3607243299484253,
"learning_rate": 2.9325079555195163e-05,
"loss": 0.4096,
"step": 1185
},
{
"epoch": 2.23159868729489,
"grad_norm": 0.4174239933490753,
"learning_rate": 2.9179872930063e-05,
"loss": 0.3859,
"step": 1190
},
{
"epoch": 2.2409751523675574,
"grad_norm": 0.46482330560684204,
"learning_rate": 2.9034521091378635e-05,
"loss": 0.3905,
"step": 1195
},
{
"epoch": 2.250351617440225,
"grad_norm": 0.43692639470100403,
"learning_rate": 2.8889029088830686e-05,
"loss": 0.4121,
"step": 1200
},
{
"epoch": 2.2597280825128925,
"grad_norm": 0.40547794103622437,
"learning_rate": 2.8743401976977257e-05,
"loss": 0.4,
"step": 1205
},
{
"epoch": 2.2691045475855605,
"grad_norm": 0.3966444432735443,
"learning_rate": 2.8597644815070263e-05,
"loss": 0.4482,
"step": 1210
},
{
"epoch": 2.278481012658228,
"grad_norm": 0.41211700439453125,
"learning_rate": 2.845176266687974e-05,
"loss": 0.3914,
"step": 1215
},
{
"epoch": 2.2878574777308955,
"grad_norm": 0.45993107557296753,
"learning_rate": 2.8305760600517862e-05,
"loss": 0.3862,
"step": 1220
},
{
"epoch": 2.297233942803563,
"grad_norm": 0.4274505078792572,
"learning_rate": 2.815964368826292e-05,
"loss": 0.4039,
"step": 1225
},
{
"epoch": 2.3066104078762306,
"grad_norm": 0.4318040609359741,
"learning_rate": 2.8013417006383076e-05,
"loss": 0.3868,
"step": 1230
},
{
"epoch": 2.315986872948898,
"grad_norm": 0.43052956461906433,
"learning_rate": 2.7867085634960016e-05,
"loss": 0.4118,
"step": 1235
},
{
"epoch": 2.3253633380215657,
"grad_norm": 0.4302162528038025,
"learning_rate": 2.772065465771244e-05,
"loss": 0.407,
"step": 1240
},
{
"epoch": 2.3347398030942337,
"grad_norm": 0.46215954422950745,
"learning_rate": 2.7574129161819495e-05,
"loss": 0.4101,
"step": 1245
},
{
"epoch": 2.344116268166901,
"grad_norm": 0.37758246064186096,
"learning_rate": 2.7427514237744e-05,
"loss": 0.4205,
"step": 1250
},
{
"epoch": 2.3534927332395688,
"grad_norm": 0.40978917479515076,
"learning_rate": 2.7280814979055612e-05,
"loss": 0.4116,
"step": 1255
},
{
"epoch": 2.3628691983122363,
"grad_norm": 0.41880902647972107,
"learning_rate": 2.713403648225388e-05,
"loss": 0.3917,
"step": 1260
},
{
"epoch": 2.372245663384904,
"grad_norm": 0.39450138807296753,
"learning_rate": 2.698718384659114e-05,
"loss": 0.41,
"step": 1265
},
{
"epoch": 2.3816221284575714,
"grad_norm": 0.4140292704105377,
"learning_rate": 2.684026217389544e-05,
"loss": 0.4449,
"step": 1270
},
{
"epoch": 2.390998593530239,
"grad_norm": 0.40103039145469666,
"learning_rate": 2.6693276568393245e-05,
"loss": 0.3831,
"step": 1275
},
{
"epoch": 2.400375058602907,
"grad_norm": 0.4108443558216095,
"learning_rate": 2.6546232136532083e-05,
"loss": 0.4321,
"step": 1280
},
{
"epoch": 2.4097515236755744,
"grad_norm": 0.39723923802375793,
"learning_rate": 2.639913398680322e-05,
"loss": 0.4236,
"step": 1285
},
{
"epoch": 2.419127988748242,
"grad_norm": 0.36689993739128113,
"learning_rate": 2.6251987229564123e-05,
"loss": 0.4029,
"step": 1290
},
{
"epoch": 2.4285044538209095,
"grad_norm": 0.44802841544151306,
"learning_rate": 2.610479697686093e-05,
"loss": 0.3794,
"step": 1295
},
{
"epoch": 2.437880918893577,
"grad_norm": 0.4667953848838806,
"learning_rate": 2.595756834225089e-05,
"loss": 0.4178,
"step": 1300
},
{
"epoch": 2.4472573839662446,
"grad_norm": 0.41082021594047546,
"learning_rate": 2.5810306440624644e-05,
"loss": 0.4496,
"step": 1305
},
{
"epoch": 2.456633849038912,
"grad_norm": 0.4804684519767761,
"learning_rate": 2.566301638802861e-05,
"loss": 0.4215,
"step": 1310
},
{
"epoch": 2.46601031411158,
"grad_norm": 0.4113605320453644,
"learning_rate": 2.551570330148716e-05,
"loss": 0.3964,
"step": 1315
},
{
"epoch": 2.4753867791842477,
"grad_norm": 0.40913546085357666,
"learning_rate": 2.5368372298824922e-05,
"loss": 0.3871,
"step": 1320
},
{
"epoch": 2.484763244256915,
"grad_norm": 0.45879778265953064,
"learning_rate": 2.5221028498488947e-05,
"loss": 0.4146,
"step": 1325
},
{
"epoch": 2.4941397093295827,
"grad_norm": 0.46979445219039917,
"learning_rate": 2.507367701937087e-05,
"loss": 0.3892,
"step": 1330
},
{
"epoch": 2.5035161744022503,
"grad_norm": 0.3892490565776825,
"learning_rate": 2.492632298062913e-05,
"loss": 0.3995,
"step": 1335
},
{
"epoch": 2.512892639474918,
"grad_norm": 0.4790550768375397,
"learning_rate": 2.4778971501511063e-05,
"loss": 0.423,
"step": 1340
},
{
"epoch": 2.5222691045475853,
"grad_norm": 0.41578853130340576,
"learning_rate": 2.4631627701175084e-05,
"loss": 0.3845,
"step": 1345
},
{
"epoch": 2.5316455696202533,
"grad_norm": 0.457792192697525,
"learning_rate": 2.448429669851285e-05,
"loss": 0.3854,
"step": 1350
},
{
"epoch": 2.541022034692921,
"grad_norm": 0.44336166977882385,
"learning_rate": 2.43369836119714e-05,
"loss": 0.4096,
"step": 1355
},
{
"epoch": 2.5503984997655884,
"grad_norm": 0.4408751428127289,
"learning_rate": 2.4189693559375365e-05,
"loss": 0.4167,
"step": 1360
},
{
"epoch": 2.559774964838256,
"grad_norm": 0.4089582860469818,
"learning_rate": 2.4042431657749117e-05,
"loss": 0.3829,
"step": 1365
},
{
"epoch": 2.5691514299109235,
"grad_norm": 0.4729689657688141,
"learning_rate": 2.3895203023139073e-05,
"loss": 0.3861,
"step": 1370
},
{
"epoch": 2.578527894983591,
"grad_norm": 0.3834232985973358,
"learning_rate": 2.3748012770435883e-05,
"loss": 0.408,
"step": 1375
},
{
"epoch": 2.5879043600562586,
"grad_norm": 0.47135239839553833,
"learning_rate": 2.3600866013196787e-05,
"loss": 0.408,
"step": 1380
},
{
"epoch": 2.5972808251289266,
"grad_norm": 0.4259538948535919,
"learning_rate": 2.3453767863467923e-05,
"loss": 0.3994,
"step": 1385
},
{
"epoch": 2.606657290201594,
"grad_norm": 0.4320566654205322,
"learning_rate": 2.3306723431606758e-05,
"loss": 0.3801,
"step": 1390
},
{
"epoch": 2.6160337552742616,
"grad_norm": 0.3922535479068756,
"learning_rate": 2.3159737826104565e-05,
"loss": 0.421,
"step": 1395
},
{
"epoch": 2.625410220346929,
"grad_norm": 0.4332144558429718,
"learning_rate": 2.3012816153408863e-05,
"loss": 0.4058,
"step": 1400
},
{
"epoch": 2.6347866854195967,
"grad_norm": 0.46418026089668274,
"learning_rate": 2.286596351774613e-05,
"loss": 0.3922,
"step": 1405
},
{
"epoch": 2.6441631504922647,
"grad_norm": 0.4476454555988312,
"learning_rate": 2.271918502094439e-05,
"loss": 0.3822,
"step": 1410
},
{
"epoch": 2.653539615564932,
"grad_norm": 0.4266027510166168,
"learning_rate": 2.2572485762256005e-05,
"loss": 0.382,
"step": 1415
},
{
"epoch": 2.6629160806375998,
"grad_norm": 0.5318887233734131,
"learning_rate": 2.2425870838180507e-05,
"loss": 0.3881,
"step": 1420
},
{
"epoch": 2.6722925457102673,
"grad_norm": 0.4814053177833557,
"learning_rate": 2.2279345342287567e-05,
"loss": 0.4004,
"step": 1425
},
{
"epoch": 2.681669010782935,
"grad_norm": 0.43376603722572327,
"learning_rate": 2.2132914365039993e-05,
"loss": 0.4052,
"step": 1430
},
{
"epoch": 2.6910454758556024,
"grad_norm": 0.4502093493938446,
"learning_rate": 2.1986582993616926e-05,
"loss": 0.4018,
"step": 1435
},
{
"epoch": 2.70042194092827,
"grad_norm": 0.41802868247032166,
"learning_rate": 2.1840356311737084e-05,
"loss": 0.3969,
"step": 1440
},
{
"epoch": 2.709798406000938,
"grad_norm": 0.37022143602371216,
"learning_rate": 2.169423939948215e-05,
"loss": 0.421,
"step": 1445
},
{
"epoch": 2.719174871073605,
"grad_norm": 0.47152194380760193,
"learning_rate": 2.154823733312027e-05,
"loss": 0.4105,
"step": 1450
},
{
"epoch": 2.728551336146273,
"grad_norm": 0.4552992582321167,
"learning_rate": 2.140235518492975e-05,
"loss": 0.3879,
"step": 1455
},
{
"epoch": 2.7379278012189405,
"grad_norm": 0.42312508821487427,
"learning_rate": 2.125659802302275e-05,
"loss": 0.4014,
"step": 1460
},
{
"epoch": 2.747304266291608,
"grad_norm": 0.37846195697784424,
"learning_rate": 2.1110970911169316e-05,
"loss": 0.4116,
"step": 1465
},
{
"epoch": 2.7566807313642756,
"grad_norm": 0.42782410979270935,
"learning_rate": 2.096547890862137e-05,
"loss": 0.394,
"step": 1470
},
{
"epoch": 2.766057196436943,
"grad_norm": 0.4843522310256958,
"learning_rate": 2.0820127069937008e-05,
"loss": 0.3984,
"step": 1475
},
{
"epoch": 2.775433661509611,
"grad_norm": 0.46708783507347107,
"learning_rate": 2.0674920444804847e-05,
"loss": 0.3906,
"step": 1480
},
{
"epoch": 2.7848101265822782,
"grad_norm": 0.4305874705314636,
"learning_rate": 2.0529864077868643e-05,
"loss": 0.4101,
"step": 1485
},
{
"epoch": 2.794186591654946,
"grad_norm": 0.40540897846221924,
"learning_rate": 2.0384963008551995e-05,
"loss": 0.4056,
"step": 1490
},
{
"epoch": 2.8035630567276137,
"grad_norm": 0.4375884532928467,
"learning_rate": 2.0240222270883288e-05,
"loss": 0.4285,
"step": 1495
},
{
"epoch": 2.8129395218002813,
"grad_norm": 0.4903147220611572,
"learning_rate": 2.0095646893320828e-05,
"loss": 0.3852,
"step": 1500
},
{
"epoch": 2.822315986872949,
"grad_norm": 0.4598534107208252,
"learning_rate": 1.9951241898578085e-05,
"loss": 0.3815,
"step": 1505
},
{
"epoch": 2.8316924519456164,
"grad_norm": 0.4689639210700989,
"learning_rate": 1.980701230344926e-05,
"loss": 0.4003,
"step": 1510
},
{
"epoch": 2.8410689170182843,
"grad_norm": 0.4126102924346924,
"learning_rate": 1.9662963118634954e-05,
"loss": 0.419,
"step": 1515
},
{
"epoch": 2.8504453820909514,
"grad_norm": 0.5043439865112305,
"learning_rate": 1.9519099348568127e-05,
"loss": 0.3939,
"step": 1520
},
{
"epoch": 2.8598218471636194,
"grad_norm": 1.107731580734253,
"learning_rate": 1.93754259912402e-05,
"loss": 0.4255,
"step": 1525
},
{
"epoch": 2.869198312236287,
"grad_norm": 0.43347910046577454,
"learning_rate": 1.9231948038027462e-05,
"loss": 0.3898,
"step": 1530
},
{
"epoch": 2.8785747773089545,
"grad_norm": 0.42569872736930847,
"learning_rate": 1.9088670473517605e-05,
"loss": 0.404,
"step": 1535
},
{
"epoch": 2.887951242381622,
"grad_norm": 0.42402687668800354,
"learning_rate": 1.8945598275336633e-05,
"loss": 0.3834,
"step": 1540
},
{
"epoch": 2.8973277074542896,
"grad_norm": 0.47469502687454224,
"learning_rate": 1.8802736413975844e-05,
"loss": 0.4378,
"step": 1545
},
{
"epoch": 2.9067041725269576,
"grad_norm": 0.43439674377441406,
"learning_rate": 1.866008985261924e-05,
"loss": 0.3953,
"step": 1550
},
{
"epoch": 2.916080637599625,
"grad_norm": 0.4604777693748474,
"learning_rate": 1.8517663546971013e-05,
"loss": 0.401,
"step": 1555
},
{
"epoch": 2.9254571026722926,
"grad_norm": 0.46638715267181396,
"learning_rate": 1.8375462445083464e-05,
"loss": 0.4101,
"step": 1560
},
{
"epoch": 2.93483356774496,
"grad_norm": 0.46090996265411377,
"learning_rate": 1.8233491487185006e-05,
"loss": 0.4172,
"step": 1565
},
{
"epoch": 2.9442100328176277,
"grad_norm": 0.4011158049106598,
"learning_rate": 1.8091755605508643e-05,
"loss": 0.4161,
"step": 1570
},
{
"epoch": 2.9535864978902953,
"grad_norm": 0.48108768463134766,
"learning_rate": 1.7950259724120512e-05,
"loss": 0.4291,
"step": 1575
},
{
"epoch": 2.962962962962963,
"grad_norm": 0.4274104833602905,
"learning_rate": 1.7809008758748913e-05,
"loss": 0.4102,
"step": 1580
},
{
"epoch": 2.972339428035631,
"grad_norm": 0.46605607867240906,
"learning_rate": 1.766800761661344e-05,
"loss": 0.3811,
"step": 1585
},
{
"epoch": 2.9817158931082983,
"grad_norm": 0.5101811289787292,
"learning_rate": 1.752726119625459e-05,
"loss": 0.4292,
"step": 1590
},
{
"epoch": 2.991092358180966,
"grad_norm": 0.45644745230674744,
"learning_rate": 1.7386774387363484e-05,
"loss": 0.4238,
"step": 1595
},
{
"epoch": 3.0004688232536334,
"grad_norm": 0.4713057577610016,
"learning_rate": 1.724655207061209e-05,
"loss": 0.4033,
"step": 1600
},
{
"epoch": 3.009845288326301,
"grad_norm": 0.43844765424728394,
"learning_rate": 1.710659911748359e-05,
"loss": 0.4157,
"step": 1605
},
{
"epoch": 3.0192217533989685,
"grad_norm": 0.46663135290145874,
"learning_rate": 1.696692039010317e-05,
"loss": 0.39,
"step": 1610
},
{
"epoch": 3.028598218471636,
"grad_norm": 0.433660626411438,
"learning_rate": 1.6827520741069118e-05,
"loss": 0.4134,
"step": 1615
},
{
"epoch": 3.037974683544304,
"grad_norm": 0.4730021059513092,
"learning_rate": 1.6688405013284192e-05,
"loss": 0.4095,
"step": 1620
},
{
"epoch": 3.0473511486169715,
"grad_norm": 0.3873763680458069,
"learning_rate": 1.6549578039787436e-05,
"loss": 0.406,
"step": 1625
},
{
"epoch": 3.056727613689639,
"grad_norm": 0.4337627589702606,
"learning_rate": 1.6411044643586204e-05,
"loss": 0.3721,
"step": 1630
},
{
"epoch": 3.0661040787623066,
"grad_norm": 0.4677801728248596,
"learning_rate": 1.627280963748869e-05,
"loss": 0.3739,
"step": 1635
},
{
"epoch": 3.075480543834974,
"grad_norm": 0.4103066921234131,
"learning_rate": 1.613487782393661e-05,
"loss": 0.4147,
"step": 1640
},
{
"epoch": 3.0848570089076417,
"grad_norm": 0.48415660858154297,
"learning_rate": 1.5997253994838484e-05,
"loss": 0.3684,
"step": 1645
},
{
"epoch": 3.0942334739803092,
"grad_norm": 0.5216359496116638,
"learning_rate": 1.5859942931403072e-05,
"loss": 0.3977,
"step": 1650
},
{
"epoch": 3.103609939052977,
"grad_norm": 0.46912798285484314,
"learning_rate": 1.5722949403973308e-05,
"loss": 0.4219,
"step": 1655
},
{
"epoch": 3.1129864041256448,
"grad_norm": 0.5608705282211304,
"learning_rate": 1.5586278171860546e-05,
"loss": 0.3957,
"step": 1660
},
{
"epoch": 3.1223628691983123,
"grad_norm": 0.44117632508277893,
"learning_rate": 1.5449933983179256e-05,
"loss": 0.3977,
"step": 1665
},
{
"epoch": 3.13173933427098,
"grad_norm": 0.45745447278022766,
"learning_rate": 1.5313921574682032e-05,
"loss": 0.3842,
"step": 1670
},
{
"epoch": 3.1411157993436474,
"grad_norm": 0.4728071689605713,
"learning_rate": 1.517824567159506e-05,
"loss": 0.3607,
"step": 1675
},
{
"epoch": 3.150492264416315,
"grad_norm": 0.4618283808231354,
"learning_rate": 1.5042910987453909e-05,
"loss": 0.4065,
"step": 1680
},
{
"epoch": 3.1598687294889825,
"grad_norm": 0.5012606978416443,
"learning_rate": 1.4907922223939874e-05,
"loss": 0.3856,
"step": 1685
},
{
"epoch": 3.1692451945616504,
"grad_norm": 0.42908909916877747,
"learning_rate": 1.4773284070716503e-05,
"loss": 0.4035,
"step": 1690
},
{
"epoch": 3.178621659634318,
"grad_norm": 0.4527268707752228,
"learning_rate": 1.4639001205266803e-05,
"loss": 0.3798,
"step": 1695
},
{
"epoch": 3.1879981247069855,
"grad_norm": 0.4418729841709137,
"learning_rate": 1.4505078292730632e-05,
"loss": 0.4021,
"step": 1700
},
{
"epoch": 3.197374589779653,
"grad_norm": 0.46943390369415283,
"learning_rate": 1.4371519985742715e-05,
"loss": 0.3648,
"step": 1705
},
{
"epoch": 3.2067510548523206,
"grad_norm": 0.49398934841156006,
"learning_rate": 1.4238330924270927e-05,
"loss": 0.3895,
"step": 1710
},
{
"epoch": 3.216127519924988,
"grad_norm": 0.47141122817993164,
"learning_rate": 1.4105515735455149e-05,
"loss": 0.4117,
"step": 1715
},
{
"epoch": 3.2255039849976557,
"grad_norm": 0.4392653703689575,
"learning_rate": 1.3973079033446501e-05,
"loss": 0.4201,
"step": 1720
},
{
"epoch": 3.2348804500703237,
"grad_norm": 0.5093908309936523,
"learning_rate": 1.3841025419247045e-05,
"loss": 0.3906,
"step": 1725
},
{
"epoch": 3.244256915142991,
"grad_norm": 0.4423893094062805,
"learning_rate": 1.3709359480549932e-05,
"loss": 0.3695,
"step": 1730
},
{
"epoch": 3.2536333802156587,
"grad_norm": 0.48355525732040405,
"learning_rate": 1.3578085791580008e-05,
"loss": 0.365,
"step": 1735
},
{
"epoch": 3.2630098452883263,
"grad_norm": 0.41361674666404724,
"learning_rate": 1.3447208912934927e-05,
"loss": 0.4157,
"step": 1740
},
{
"epoch": 3.272386310360994,
"grad_norm": 0.5110962390899658,
"learning_rate": 1.3316733391426716e-05,
"loss": 0.4005,
"step": 1745
},
{
"epoch": 3.2817627754336613,
"grad_norm": 0.4553833603858948,
"learning_rate": 1.3186663759923782e-05,
"loss": 0.3792,
"step": 1750
},
{
"epoch": 3.291139240506329,
"grad_norm": 0.5377179980278015,
"learning_rate": 1.3057004537193423e-05,
"loss": 0.4093,
"step": 1755
},
{
"epoch": 3.300515705578997,
"grad_norm": 0.4575432240962982,
"learning_rate": 1.2927760227744943e-05,
"loss": 0.3647,
"step": 1760
},
{
"epoch": 3.3098921706516644,
"grad_norm": 0.5318431854248047,
"learning_rate": 1.2798935321673e-05,
"loss": 0.374,
"step": 1765
},
{
"epoch": 3.319268635724332,
"grad_norm": 0.4416927695274353,
"learning_rate": 1.2670534294501756e-05,
"loss": 0.3888,
"step": 1770
},
{
"epoch": 3.3286451007969995,
"grad_norm": 0.5091699361801147,
"learning_rate": 1.2542561607029322e-05,
"loss": 0.3969,
"step": 1775
},
{
"epoch": 3.338021565869667,
"grad_norm": 0.45796316862106323,
"learning_rate": 1.2415021705172799e-05,
"loss": 0.4196,
"step": 1780
},
{
"epoch": 3.3473980309423346,
"grad_norm": 0.5065374970436096,
"learning_rate": 1.2287919019813807e-05,
"loss": 0.3885,
"step": 1785
},
{
"epoch": 3.356774496015002,
"grad_norm": 0.5083961486816406,
"learning_rate": 1.2161257966644588e-05,
"loss": 0.4118,
"step": 1790
},
{
"epoch": 3.36615096108767,
"grad_norm": 0.44367310404777527,
"learning_rate": 1.2035042946014572e-05,
"loss": 0.4124,
"step": 1795
},
{
"epoch": 3.3755274261603376,
"grad_norm": 0.46685874462127686,
"learning_rate": 1.1909278342777513e-05,
"loss": 0.394,
"step": 1800
},
{
"epoch": 3.384903891233005,
"grad_norm": 0.4649623930454254,
"learning_rate": 1.1783968526139121e-05,
"loss": 0.3996,
"step": 1805
},
{
"epoch": 3.3942803563056727,
"grad_norm": 0.5142059922218323,
"learning_rate": 1.1659117849505367e-05,
"loss": 0.4036,
"step": 1810
},
{
"epoch": 3.4036568213783402,
"grad_norm": 0.4626547396183014,
"learning_rate": 1.1534730650331096e-05,
"loss": 0.3976,
"step": 1815
},
{
"epoch": 3.413033286451008,
"grad_norm": 0.4900161623954773,
"learning_rate": 1.1410811249969475e-05,
"loss": 0.4282,
"step": 1820
},
{
"epoch": 3.4224097515236753,
"grad_norm": 0.43262672424316406,
"learning_rate": 1.1287363953521779e-05,
"loss": 0.3807,
"step": 1825
},
{
"epoch": 3.4317862165963433,
"grad_norm": 0.46232739090919495,
"learning_rate": 1.1164393049687868e-05,
"loss": 0.3761,
"step": 1830
},
{
"epoch": 3.441162681669011,
"grad_norm": 0.5012332201004028,
"learning_rate": 1.104190281061718e-05,
"loss": 0.4152,
"step": 1835
},
{
"epoch": 3.4505391467416784,
"grad_norm": 0.4876830279827118,
"learning_rate": 1.0919897491760279e-05,
"loss": 0.363,
"step": 1840
},
{
"epoch": 3.459915611814346,
"grad_norm": 0.44954073429107666,
"learning_rate": 1.0798381331721109e-05,
"loss": 0.3789,
"step": 1845
},
{
"epoch": 3.4692920768870135,
"grad_norm": 0.44105660915374756,
"learning_rate": 1.0677358552109618e-05,
"loss": 0.413,
"step": 1850
},
{
"epoch": 3.4786685419596814,
"grad_norm": 0.45533353090286255,
"learning_rate": 1.0556833357395188e-05,
"loss": 0.3812,
"step": 1855
},
{
"epoch": 3.488045007032349,
"grad_norm": 0.4477691054344177,
"learning_rate": 1.0436809934760527e-05,
"loss": 0.3949,
"step": 1860
},
{
"epoch": 3.4974214721050165,
"grad_norm": 0.4761820137500763,
"learning_rate": 1.031729245395622e-05,
"loss": 0.3649,
"step": 1865
},
{
"epoch": 3.506797937177684,
"grad_norm": 0.48367804288864136,
"learning_rate": 1.0198285067155827e-05,
"loss": 0.3942,
"step": 1870
},
{
"epoch": 3.5161744022503516,
"grad_norm": 0.46382734179496765,
"learning_rate": 1.0079791908811683e-05,
"loss": 0.4007,
"step": 1875
},
{
"epoch": 3.525550867323019,
"grad_norm": 0.43440571427345276,
"learning_rate": 9.961817095511242e-06,
"loss": 0.3773,
"step": 1880
},
{
"epoch": 3.5349273323956867,
"grad_norm": 0.45736581087112427,
"learning_rate": 9.844364725834057e-06,
"loss": 0.4196,
"step": 1885
},
{
"epoch": 3.5443037974683547,
"grad_norm": 0.49210745096206665,
"learning_rate": 9.727438880209366e-06,
"loss": 0.3792,
"step": 1890
},
{
"epoch": 3.5536802625410218,
"grad_norm": 0.5130970478057861,
"learning_rate": 9.611043620774419e-06,
"loss": 0.4036,
"step": 1895
},
{
"epoch": 3.5630567276136897,
"grad_norm": 0.471587598323822,
"learning_rate": 9.495182991233236e-06,
"loss": 0.3955,
"step": 1900
},
{
"epoch": 3.5724331926863573,
"grad_norm": 0.522293746471405,
"learning_rate": 9.379861016716224e-06,
"loss": 0.4056,
"step": 1905
},
{
"epoch": 3.581809657759025,
"grad_norm": 0.4667339026927948,
"learning_rate": 9.265081703640285e-06,
"loss": 0.4144,
"step": 1910
},
{
"epoch": 3.5911861228316924,
"grad_norm": 0.4434965252876282,
"learning_rate": 9.150849039569655e-06,
"loss": 0.39,
"step": 1915
},
{
"epoch": 3.60056258790436,
"grad_norm": 0.46499741077423096,
"learning_rate": 9.037166993077337e-06,
"loss": 0.3976,
"step": 1920
},
{
"epoch": 3.609939052977028,
"grad_norm": 0.47565406560897827,
"learning_rate": 8.92403951360726e-06,
"loss": 0.3774,
"step": 1925
},
{
"epoch": 3.6193155180496954,
"grad_norm": 0.45665204524993896,
"learning_rate": 8.811470531337102e-06,
"loss": 0.4209,
"step": 1930
},
{
"epoch": 3.628691983122363,
"grad_norm": 0.4445512890815735,
"learning_rate": 8.699463957041649e-06,
"loss": 0.428,
"step": 1935
},
{
"epoch": 3.6380684481950305,
"grad_norm": 0.5019098520278931,
"learning_rate": 8.588023681957028e-06,
"loss": 0.4121,
"step": 1940
},
{
"epoch": 3.647444913267698,
"grad_norm": 0.4833836555480957,
"learning_rate": 8.477153577645481e-06,
"loss": 0.4191,
"step": 1945
},
{
"epoch": 3.6568213783403656,
"grad_norm": 0.8952960968017578,
"learning_rate": 8.36685749586087e-06,
"loss": 0.417,
"step": 1950
},
{
"epoch": 3.666197843413033,
"grad_norm": 0.537151575088501,
"learning_rate": 8.257139268414844e-06,
"loss": 0.4047,
"step": 1955
},
{
"epoch": 3.675574308485701,
"grad_norm": 0.5016284584999084,
"learning_rate": 8.14800270704375e-06,
"loss": 0.4119,
"step": 1960
},
{
"epoch": 3.6849507735583686,
"grad_norm": 0.49896350502967834,
"learning_rate": 8.039451603276185e-06,
"loss": 0.3908,
"step": 1965
},
{
"epoch": 3.694327238631036,
"grad_norm": 0.4538595974445343,
"learning_rate": 7.931489728301292e-06,
"loss": 0.4636,
"step": 1970
},
{
"epoch": 3.7037037037037037,
"grad_norm": 0.5051478147506714,
"learning_rate": 7.8241208328377e-06,
"loss": 0.3925,
"step": 1975
},
{
"epoch": 3.7130801687763713,
"grad_norm": 0.464751660823822,
"learning_rate": 7.71734864700331e-06,
"loss": 0.3957,
"step": 1980
},
{
"epoch": 3.722456633849039,
"grad_norm": 0.4575950801372528,
"learning_rate": 7.611176880185597e-06,
"loss": 0.4004,
"step": 1985
},
{
"epoch": 3.7318330989217063,
"grad_norm": 1.0100325345993042,
"learning_rate": 7.505609220912821e-06,
"loss": 0.3945,
"step": 1990
},
{
"epoch": 3.7412095639943743,
"grad_norm": 0.47300100326538086,
"learning_rate": 7.4006493367258515e-06,
"loss": 0.4112,
"step": 1995
},
{
"epoch": 3.750586029067042,
"grad_norm": 0.4961658716201782,
"learning_rate": 7.2963008740507656e-06,
"loss": 0.4123,
"step": 2000
},
{
"epoch": 3.7599624941397094,
"grad_norm": 0.4959772229194641,
"learning_rate": 7.192567458072138e-06,
"loss": 0.4225,
"step": 2005
},
{
"epoch": 3.769338959212377,
"grad_norm": 0.4662582576274872,
"learning_rate": 7.089452692607146e-06,
"loss": 0.3601,
"step": 2010
},
{
"epoch": 3.7787154242850445,
"grad_norm": 0.4932651221752167,
"learning_rate": 6.986960159980327e-06,
"loss": 0.3654,
"step": 2015
},
{
"epoch": 3.788091889357712,
"grad_norm": 0.4127211272716522,
"learning_rate": 6.885093420899152e-06,
"loss": 0.404,
"step": 2020
},
{
"epoch": 3.7974683544303796,
"grad_norm": 0.5077162384986877,
"learning_rate": 6.783856014330281e-06,
"loss": 0.4015,
"step": 2025
},
{
"epoch": 3.8068448195030475,
"grad_norm": 0.48392489552497864,
"learning_rate": 6.68325145737669e-06,
"loss": 0.3995,
"step": 2030
},
{
"epoch": 3.816221284575715,
"grad_norm": 0.472433865070343,
"learning_rate": 6.583283245155414e-06,
"loss": 0.4102,
"step": 2035
},
{
"epoch": 3.8255977496483826,
"grad_norm": 0.5179023742675781,
"learning_rate": 6.483954850676133e-06,
"loss": 0.3872,
"step": 2040
},
{
"epoch": 3.83497421472105,
"grad_norm": 0.473317414522171,
"learning_rate": 6.385269724720547e-06,
"loss": 0.4137,
"step": 2045
},
{
"epoch": 3.8443506797937177,
"grad_norm": 0.5055838227272034,
"learning_rate": 6.28723129572247e-06,
"loss": 0.3769,
"step": 2050
},
{
"epoch": 3.8537271448663852,
"grad_norm": 0.7340875267982483,
"learning_rate": 6.189842969648737e-06,
"loss": 0.4172,
"step": 2055
},
{
"epoch": 3.8631036099390528,
"grad_norm": 0.47141486406326294,
"learning_rate": 6.0931081298808316e-06,
"loss": 0.3964,
"step": 2060
},
{
"epoch": 3.8724800750117208,
"grad_norm": 0.48367300629615784,
"learning_rate": 5.997030137097426e-06,
"loss": 0.3858,
"step": 2065
},
{
"epoch": 3.8818565400843883,
"grad_norm": 0.4850892424583435,
"learning_rate": 5.901612329157535e-06,
"loss": 0.4061,
"step": 2070
},
{
"epoch": 3.891233005157056,
"grad_norm": 0.45096391439437866,
"learning_rate": 5.806858020984629e-06,
"loss": 0.4316,
"step": 2075
},
{
"epoch": 3.9006094702297234,
"grad_norm": 0.4780539274215698,
"learning_rate": 5.712770504451426e-06,
"loss": 0.398,
"step": 2080
},
{
"epoch": 3.909985935302391,
"grad_norm": 0.5059804320335388,
"learning_rate": 5.619353048265552e-06,
"loss": 0.3819,
"step": 2085
},
{
"epoch": 3.9193624003750585,
"grad_norm": 0.4585495889186859,
"learning_rate": 5.526608897855953e-06,
"loss": 0.3873,
"step": 2090
},
{
"epoch": 3.928738865447726,
"grad_norm": 0.5159944295883179,
"learning_rate": 5.434541275260182e-06,
"loss": 0.421,
"step": 2095
},
{
"epoch": 3.938115330520394,
"grad_norm": 0.4670829176902771,
"learning_rate": 5.343153379012444e-06,
"loss": 0.4135,
"step": 2100
},
{
"epoch": 3.9474917955930615,
"grad_norm": 0.4940509796142578,
"learning_rate": 5.252448384032471e-06,
"loss": 0.391,
"step": 2105
},
{
"epoch": 3.956868260665729,
"grad_norm": 0.4698173999786377,
"learning_rate": 5.162429441515221e-06,
"loss": 0.4018,
"step": 2110
},
{
"epoch": 3.9662447257383966,
"grad_norm": 0.49286118149757385,
"learning_rate": 5.073099678821413e-06,
"loss": 0.386,
"step": 2115
},
{
"epoch": 3.975621190811064,
"grad_norm": 0.4916246235370636,
"learning_rate": 4.984462199368872e-06,
"loss": 0.418,
"step": 2120
},
{
"epoch": 3.9849976558837317,
"grad_norm": 0.5064667463302612,
"learning_rate": 4.8965200825247245e-06,
"loss": 0.3901,
"step": 2125
},
{
"epoch": 3.994374120956399,
"grad_norm": 0.40669238567352295,
"learning_rate": 4.809276383498376e-06,
"loss": 0.3967,
"step": 2130
},
{
"epoch": 4.003750586029067,
"grad_norm": 0.46348291635513306,
"learning_rate": 4.722734133235438e-06,
"loss": 0.4028,
"step": 2135
},
{
"epoch": 4.013127051101734,
"grad_norm": 0.4899981617927551,
"learning_rate": 4.636896338312374e-06,
"loss": 0.4171,
"step": 2140
},
{
"epoch": 4.022503516174402,
"grad_norm": 0.4799928665161133,
"learning_rate": 4.551765980832059e-06,
"loss": 0.3881,
"step": 2145
},
{
"epoch": 4.03187998124707,
"grad_norm": 0.5409300327301025,
"learning_rate": 4.467346018320198e-06,
"loss": 0.4012,
"step": 2150
},
{
"epoch": 4.041256446319737,
"grad_norm": 0.4798271358013153,
"learning_rate": 4.383639383622557e-06,
"loss": 0.3872,
"step": 2155
},
{
"epoch": 4.050632911392405,
"grad_norm": 0.487051784992218,
"learning_rate": 4.300648984803085e-06,
"loss": 0.3946,
"step": 2160
},
{
"epoch": 4.060009376465072,
"grad_norm": 0.4268711507320404,
"learning_rate": 4.218377705042867e-06,
"loss": 0.4011,
"step": 2165
},
{
"epoch": 4.06938584153774,
"grad_norm": 0.44820597767829895,
"learning_rate": 4.1368284025399965e-06,
"loss": 0.4089,
"step": 2170
},
{
"epoch": 4.0787623066104075,
"grad_norm": 0.42342251539230347,
"learning_rate": 4.0560039104102305e-06,
"loss": 0.3993,
"step": 2175
},
{
"epoch": 4.0881387716830755,
"grad_norm": 0.45130422711372375,
"learning_rate": 3.975907036588594e-06,
"loss": 0.3773,
"step": 2180
},
{
"epoch": 4.0975152367557435,
"grad_norm": 0.486767053604126,
"learning_rate": 3.8965405637318294e-06,
"loss": 0.3855,
"step": 2185
},
{
"epoch": 4.106891701828411,
"grad_norm": 0.5100731253623962,
"learning_rate": 3.817907249121713e-06,
"loss": 0.4134,
"step": 2190
},
{
"epoch": 4.1162681669010786,
"grad_norm": 0.46529749035835266,
"learning_rate": 3.7400098245692572e-06,
"loss": 0.4058,
"step": 2195
},
{
"epoch": 4.125644631973746,
"grad_norm": 0.4404692053794861,
"learning_rate": 3.662850996319825e-06,
"loss": 0.3808,
"step": 2200
},
{
"epoch": 4.135021097046414,
"grad_norm": 0.5206236839294434,
"learning_rate": 3.586433444959103e-06,
"loss": 0.3882,
"step": 2205
},
{
"epoch": 4.144397562119081,
"grad_norm": 0.5646428465843201,
"learning_rate": 3.5107598253199758e-06,
"loss": 0.3846,
"step": 2210
},
{
"epoch": 4.153774027191749,
"grad_norm": 0.45417729020118713,
"learning_rate": 3.4358327663902677e-06,
"loss": 0.3876,
"step": 2215
},
{
"epoch": 4.163150492264417,
"grad_norm": 0.557522177696228,
"learning_rate": 3.3616548712214756e-06,
"loss": 0.3866,
"step": 2220
},
{
"epoch": 4.172526957337084,
"grad_norm": 0.4840220510959625,
"learning_rate": 3.288228716838246e-06,
"loss": 0.3884,
"step": 2225
},
{
"epoch": 4.181903422409752,
"grad_norm": 0.513700008392334,
"learning_rate": 3.2155568541489268e-06,
"loss": 0.4142,
"step": 2230
},
{
"epoch": 4.191279887482419,
"grad_norm": 0.44611451029777527,
"learning_rate": 3.143641807856898e-06,
"loss": 0.4048,
"step": 2235
},
{
"epoch": 4.200656352555087,
"grad_norm": 0.4795036017894745,
"learning_rate": 3.0724860763728767e-06,
"loss": 0.4244,
"step": 2240
},
{
"epoch": 4.210032817627754,
"grad_norm": 0.4182005524635315,
"learning_rate": 3.0020921317281264e-06,
"loss": 0.3679,
"step": 2245
},
{
"epoch": 4.219409282700422,
"grad_norm": 0.485270619392395,
"learning_rate": 2.9324624194885436e-06,
"loss": 0.4163,
"step": 2250
},
{
"epoch": 4.22878574777309,
"grad_norm": 0.5020278096199036,
"learning_rate": 2.8635993586697553e-06,
"loss": 0.3861,
"step": 2255
},
{
"epoch": 4.238162212845757,
"grad_norm": 0.449848473072052,
"learning_rate": 2.795505341653007e-06,
"loss": 0.3841,
"step": 2260
},
{
"epoch": 4.247538677918425,
"grad_norm": 0.49279651045799255,
"learning_rate": 2.728182734102111e-06,
"loss": 0.3884,
"step": 2265
},
{
"epoch": 4.256915142991092,
"grad_norm": 0.4899311363697052,
"learning_rate": 2.6616338748812255e-06,
"loss": 0.4325,
"step": 2270
},
{
"epoch": 4.26629160806376,
"grad_norm": 0.4672718942165375,
"learning_rate": 2.595861075973613e-06,
"loss": 0.3927,
"step": 2275
},
{
"epoch": 4.275668073136427,
"grad_norm": 0.4548036754131317,
"learning_rate": 2.530866622401304e-06,
"loss": 0.3879,
"step": 2280
},
{
"epoch": 4.285044538209095,
"grad_norm": 0.4743858575820923,
"learning_rate": 2.4666527721457416e-06,
"loss": 0.4045,
"step": 2285
},
{
"epoch": 4.294421003281763,
"grad_norm": 0.405335932970047,
"learning_rate": 2.40322175606931e-06,
"loss": 0.4215,
"step": 2290
},
{
"epoch": 4.30379746835443,
"grad_norm": 0.5168406367301941,
"learning_rate": 2.3405757778378445e-06,
"loss": 0.3735,
"step": 2295
},
{
"epoch": 4.313173933427098,
"grad_norm": 0.47989800572395325,
"learning_rate": 2.278717013844059e-06,
"loss": 0.3858,
"step": 2300
},
{
"epoch": 4.322550398499765,
"grad_norm": 0.4565723240375519,
"learning_rate": 2.2176476131319707e-06,
"loss": 0.3568,
"step": 2305
},
{
"epoch": 4.331926863572433,
"grad_norm": 0.5169614553451538,
"learning_rate": 2.1573696973221922e-06,
"loss": 0.385,
"step": 2310
},
{
"epoch": 4.3413033286451,
"grad_norm": 0.487652987241745,
"learning_rate": 2.0978853605382624e-06,
"loss": 0.3884,
"step": 2315
},
{
"epoch": 4.350679793717768,
"grad_norm": 0.46447455883026123,
"learning_rate": 2.0391966693338733e-06,
"loss": 0.369,
"step": 2320
},
{
"epoch": 4.360056258790436,
"grad_norm": 0.4496999979019165,
"learning_rate": 1.9813056626210886e-06,
"loss": 0.368,
"step": 2325
},
{
"epoch": 4.369432723863103,
"grad_norm": 0.4512302577495575,
"learning_rate": 1.9242143515994933e-06,
"loss": 0.3907,
"step": 2330
},
{
"epoch": 4.378809188935771,
"grad_norm": 0.4967067837715149,
"learning_rate": 1.8679247196863425e-06,
"loss": 0.3816,
"step": 2335
},
{
"epoch": 4.3881856540084385,
"grad_norm": 0.47957539558410645,
"learning_rate": 1.8124387224476347e-06,
"loss": 0.3577,
"step": 2340
},
{
"epoch": 4.3975621190811065,
"grad_norm": 0.5050100684165955,
"learning_rate": 1.757758287530195e-06,
"loss": 0.3922,
"step": 2345
},
{
"epoch": 4.4069385841537745,
"grad_norm": 0.4630352258682251,
"learning_rate": 1.7038853145946804e-06,
"loss": 0.387,
"step": 2350
},
{
"epoch": 4.416315049226442,
"grad_norm": 0.47333136200904846,
"learning_rate": 1.6508216752496141e-06,
"loss": 0.4288,
"step": 2355
},
{
"epoch": 4.42569151429911,
"grad_norm": 0.502042829990387,
"learning_rate": 1.5985692129863395e-06,
"loss": 0.3881,
"step": 2360
},
{
"epoch": 4.435067979371777,
"grad_norm": 0.47139260172843933,
"learning_rate": 1.547129743114978e-06,
"loss": 0.4099,
"step": 2365
},
{
"epoch": 4.444444444444445,
"grad_norm": 0.4579063653945923,
"learning_rate": 1.496505052701372e-06,
"loss": 0.3691,
"step": 2370
},
{
"epoch": 4.453820909517112,
"grad_norm": 0.49161937832832336,
"learning_rate": 1.4466969005050013e-06,
"loss": 0.3807,
"step": 2375
},
{
"epoch": 4.46319737458978,
"grad_norm": 0.4590076506137848,
"learning_rate": 1.3977070169178763e-06,
"loss": 0.3943,
"step": 2380
},
{
"epoch": 4.472573839662447,
"grad_norm": 0.4571836590766907,
"learning_rate": 1.349537103904408e-06,
"loss": 0.3941,
"step": 2385
},
{
"epoch": 4.481950304735115,
"grad_norm": 0.49009573459625244,
"learning_rate": 1.3021888349423222e-06,
"loss": 0.385,
"step": 2390
},
{
"epoch": 4.491326769807783,
"grad_norm": 0.457081139087677,
"learning_rate": 1.2556638549644644e-06,
"loss": 0.4137,
"step": 2395
},
{
"epoch": 4.50070323488045,
"grad_norm": 0.46972543001174927,
"learning_rate": 1.2099637803016983e-06,
"loss": 0.3827,
"step": 2400
},
{
"epoch": 4.510079699953118,
"grad_norm": 0.44439297914505005,
"learning_rate": 1.1650901986267365e-06,
"loss": 0.3695,
"step": 2405
},
{
"epoch": 4.519456165025785,
"grad_norm": 0.5357493162155151,
"learning_rate": 1.1210446688989768e-06,
"loss": 0.444,
"step": 2410
},
{
"epoch": 4.528832630098453,
"grad_norm": 0.4965517818927765,
"learning_rate": 1.0778287213103478e-06,
"loss": 0.3816,
"step": 2415
},
{
"epoch": 4.538209095171121,
"grad_norm": 0.5623694062232971,
"learning_rate": 1.0354438572321546e-06,
"loss": 0.3774,
"step": 2420
},
{
"epoch": 4.547585560243788,
"grad_norm": 0.48698845505714417,
"learning_rate": 9.938915491629063e-07,
"loss": 0.4021,
"step": 2425
},
{
"epoch": 4.556962025316456,
"grad_norm": 0.4880342483520508,
"learning_rate": 9.531732406771771e-07,
"loss": 0.3901,
"step": 2430
},
{
"epoch": 4.566338490389123,
"grad_norm": 0.5080013871192932,
"learning_rate": 9.132903463754256e-07,
"loss": 0.4357,
"step": 2435
},
{
"epoch": 4.575714955461791,
"grad_norm": 0.5003151893615723,
"learning_rate": 8.742442518348965e-07,
"loss": 0.3877,
"step": 2440
},
{
"epoch": 4.585091420534458,
"grad_norm": 0.49231135845184326,
"learning_rate": 8.360363135614307e-07,
"loss": 0.3994,
"step": 2445
},
{
"epoch": 4.594467885607126,
"grad_norm": 0.4957781732082367,
"learning_rate": 7.986678589423758e-07,
"loss": 0.4222,
"step": 2450
},
{
"epoch": 4.603844350679793,
"grad_norm": 0.428396075963974,
"learning_rate": 7.621401862004634e-07,
"loss": 0.3991,
"step": 2455
},
{
"epoch": 4.613220815752461,
"grad_norm": 0.49666279554367065,
"learning_rate": 7.264545643486997e-07,
"loss": 0.369,
"step": 2460
},
{
"epoch": 4.622597280825129,
"grad_norm": 0.4541693925857544,
"learning_rate": 6.916122331462799e-07,
"loss": 0.3856,
"step": 2465
},
{
"epoch": 4.631973745897796,
"grad_norm": 0.41727012395858765,
"learning_rate": 6.576144030555259e-07,
"loss": 0.4261,
"step": 2470
},
{
"epoch": 4.641350210970464,
"grad_norm": 0.41427725553512573,
"learning_rate": 6.244622551998203e-07,
"loss": 0.3831,
"step": 2475
},
{
"epoch": 4.650726676043131,
"grad_norm": 0.5246068239212036,
"learning_rate": 5.921569413225913e-07,
"loss": 0.4014,
"step": 2480
},
{
"epoch": 4.660103141115799,
"grad_norm": 0.5102460384368896,
"learning_rate": 5.606995837472817e-07,
"loss": 0.3954,
"step": 2485
},
{
"epoch": 4.669479606188467,
"grad_norm": 0.45667141675949097,
"learning_rate": 5.300912753383625e-07,
"loss": 0.4065,
"step": 2490
},
{
"epoch": 4.6788560712611345,
"grad_norm": 0.4597122371196747,
"learning_rate": 5.003330794633776e-07,
"loss": 0.3947,
"step": 2495
},
{
"epoch": 4.688232536333802,
"grad_norm": 0.5187001824378967,
"learning_rate": 4.714260299559875e-07,
"loss": 0.4113,
"step": 2500
},
{
"epoch": 4.6976090014064695,
"grad_norm": 0.48345983028411865,
"learning_rate": 4.4337113108005314e-07,
"loss": 0.3972,
"step": 2505
},
{
"epoch": 4.7069854664791375,
"grad_norm": 0.4524119198322296,
"learning_rate": 4.161693574947556e-07,
"loss": 0.4381,
"step": 2510
},
{
"epoch": 4.716361931551805,
"grad_norm": 0.44738277792930603,
"learning_rate": 3.8982165422073445e-07,
"loss": 0.3794,
"step": 2515
},
{
"epoch": 4.725738396624473,
"grad_norm": 0.44519349932670593,
"learning_rate": 3.6432893660723886e-07,
"loss": 0.4074,
"step": 2520
},
{
"epoch": 4.73511486169714,
"grad_norm": 0.4711282253265381,
"learning_rate": 3.396920903003559e-07,
"loss": 0.3893,
"step": 2525
},
{
"epoch": 4.744491326769808,
"grad_norm": 0.4641968905925751,
"learning_rate": 3.1591197121222107e-07,
"loss": 0.3983,
"step": 2530
},
{
"epoch": 4.753867791842476,
"grad_norm": 0.4854121804237366,
"learning_rate": 2.9298940549128964e-07,
"loss": 0.3846,
"step": 2535
},
{
"epoch": 4.763244256915143,
"grad_norm": 0.4864709973335266,
"learning_rate": 2.7092518949362875e-07,
"loss": 0.3793,
"step": 2540
},
{
"epoch": 4.772620721987811,
"grad_norm": 0.4431411027908325,
"learning_rate": 2.4972008975527593e-07,
"loss": 0.4213,
"step": 2545
},
{
"epoch": 4.781997187060478,
"grad_norm": 0.47686663269996643,
"learning_rate": 2.2937484296556566e-07,
"loss": 0.3905,
"step": 2550
},
{
"epoch": 4.791373652133146,
"grad_norm": 0.4837285876274109,
"learning_rate": 2.0989015594158058e-07,
"loss": 0.4093,
"step": 2555
},
{
"epoch": 4.800750117205814,
"grad_norm": 0.46795183420181274,
"learning_rate": 1.9126670560356553e-07,
"loss": 0.4119,
"step": 2560
},
{
"epoch": 4.810126582278481,
"grad_norm": 0.49227696657180786,
"learning_rate": 1.735051389514214e-07,
"loss": 0.3885,
"step": 2565
},
{
"epoch": 4.819503047351149,
"grad_norm": 0.45765581727027893,
"learning_rate": 1.5660607304223141e-07,
"loss": 0.3872,
"step": 2570
},
{
"epoch": 4.828879512423816,
"grad_norm": 0.496937096118927,
"learning_rate": 1.4057009496881158e-07,
"loss": 0.404,
"step": 2575
},
{
"epoch": 4.838255977496484,
"grad_norm": 0.5063027143478394,
"learning_rate": 1.2539776183932982e-07,
"loss": 0.4079,
"step": 2580
},
{
"epoch": 4.847632442569151,
"grad_norm": 0.44478198885917664,
"learning_rate": 1.1108960075794372e-07,
"loss": 0.3629,
"step": 2585
},
{
"epoch": 4.857008907641819,
"grad_norm": 0.5043444037437439,
"learning_rate": 9.764610880648451e-08,
"loss": 0.3688,
"step": 2590
},
{
"epoch": 4.866385372714487,
"grad_norm": 0.47527217864990234,
"learning_rate": 8.506775302719039e-08,
"loss": 0.3782,
"step": 2595
},
{
"epoch": 4.875761837787154,
"grad_norm": 0.46921849250793457,
"learning_rate": 7.335497040648898e-08,
"loss": 0.4003,
"step": 2600
},
{
"epoch": 4.885138302859822,
"grad_norm": 0.4486777186393738,
"learning_rate": 6.250816785980385e-08,
"loss": 0.4175,
"step": 2605
},
{
"epoch": 4.894514767932489,
"grad_norm": 0.5218645930290222,
"learning_rate": 5.2527722217421416e-08,
"loss": 0.3857,
"step": 2610
},
{
"epoch": 4.903891233005157,
"grad_norm": 0.480591744184494,
"learning_rate": 4.3413980211412516e-08,
"loss": 0.3995,
"step": 2615
},
{
"epoch": 4.913267698077824,
"grad_norm": 0.5079865455627441,
"learning_rate": 3.516725846355873e-08,
"loss": 0.369,
"step": 2620
},
{
"epoch": 4.922644163150492,
"grad_norm": 0.5890299677848816,
"learning_rate": 2.7787843474386123e-08,
"loss": 0.3704,
"step": 2625
},
{
"epoch": 4.93202062822316,
"grad_norm": 0.49126380681991577,
"learning_rate": 2.127599161318161e-08,
"loss": 0.4038,
"step": 2630
},
{
"epoch": 4.941397093295827,
"grad_norm": 0.47775766253471375,
"learning_rate": 1.5631929109102828e-08,
"loss": 0.4069,
"step": 2635
},
{
"epoch": 4.950773558368495,
"grad_norm": 0.48111775517463684,
"learning_rate": 1.0855852043323289e-08,
"loss": 0.4016,
"step": 2640
},
{
"epoch": 4.960150023441162,
"grad_norm": 0.4834578335285187,
"learning_rate": 6.947926342204536e-09,
"loss": 0.3734,
"step": 2645
},
{
"epoch": 4.96952648851383,
"grad_norm": 0.4474198818206787,
"learning_rate": 3.908287771542396e-09,
"loss": 0.3704,
"step": 2650
},
{
"epoch": 4.978902953586498,
"grad_norm": 0.4454098045825958,
"learning_rate": 1.737041931845762e-09,
"loss": 0.4141,
"step": 2655
},
{
"epoch": 4.9882794186591655,
"grad_norm": 0.48251986503601074,
"learning_rate": 4.3426425467008035e-10,
"loss": 0.4006,
"step": 2660
},
{
"epoch": 4.9976558837318334,
"grad_norm": 0.4934045970439911,
"learning_rate": 0.0,
"loss": 0.3921,
"step": 2665
},
{
"epoch": 4.9976558837318334,
"step": 2665,
"total_flos": 1.9479197956994335e+18,
"train_loss": 0.41369020724162375,
"train_runtime": 63586.5105,
"train_samples_per_second": 0.671,
"train_steps_per_second": 0.042
}
],
"logging_steps": 5,
"max_steps": 2665,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.9479197956994335e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}