AGI_MASTER / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.5159621490167481,
"eval_steps": 500,
"global_step": 1500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003439747660111654,
"grad_norm": 4.688905239105225,
"learning_rate": 1.7199862401100792e-07,
"loss": 13.1365,
"step": 10
},
{
"epoch": 0.006879495320223308,
"grad_norm": 4.80711030960083,
"learning_rate": 3.4399724802201585e-07,
"loss": 13.154,
"step": 20
},
{
"epoch": 0.010319242980334962,
"grad_norm": 4.933587074279785,
"learning_rate": 5.159958720330238e-07,
"loss": 13.1537,
"step": 30
},
{
"epoch": 0.013758990640446616,
"grad_norm": 4.939642906188965,
"learning_rate": 6.879944960440317e-07,
"loss": 13.1529,
"step": 40
},
{
"epoch": 0.01719873830055827,
"grad_norm": 5.070457935333252,
"learning_rate": 8.599931200550396e-07,
"loss": 13.1224,
"step": 50
},
{
"epoch": 0.020638485960669924,
"grad_norm": 5.011481285095215,
"learning_rate": 1.0319917440660475e-06,
"loss": 13.0346,
"step": 60
},
{
"epoch": 0.02407823362078158,
"grad_norm": 5.395399570465088,
"learning_rate": 1.2039903680770555e-06,
"loss": 12.9762,
"step": 70
},
{
"epoch": 0.027517981280893233,
"grad_norm": 5.811434745788574,
"learning_rate": 1.3759889920880634e-06,
"loss": 12.853,
"step": 80
},
{
"epoch": 0.03095772894100489,
"grad_norm": 6.088072776794434,
"learning_rate": 1.5479876160990713e-06,
"loss": 12.6852,
"step": 90
},
{
"epoch": 0.03439747660111654,
"grad_norm": 6.066171646118164,
"learning_rate": 1.7199862401100792e-06,
"loss": 12.4486,
"step": 100
},
{
"epoch": 0.037837224261228194,
"grad_norm": 6.659308910369873,
"learning_rate": 1.891984864121087e-06,
"loss": 12.1745,
"step": 110
},
{
"epoch": 0.04127697192133985,
"grad_norm": 7.230446815490723,
"learning_rate": 2.063983488132095e-06,
"loss": 11.8162,
"step": 120
},
{
"epoch": 0.04471671958145151,
"grad_norm": 7.676384449005127,
"learning_rate": 2.2359821121431026e-06,
"loss": 11.3668,
"step": 130
},
{
"epoch": 0.04815646724156316,
"grad_norm": 8.068939208984375,
"learning_rate": 2.407980736154111e-06,
"loss": 10.9014,
"step": 140
},
{
"epoch": 0.05159621490167481,
"grad_norm": 8.794692039489746,
"learning_rate": 2.5799793601651184e-06,
"loss": 10.327,
"step": 150
},
{
"epoch": 0.055035962561786465,
"grad_norm": 9.102154731750488,
"learning_rate": 2.7519779841761268e-06,
"loss": 9.6304,
"step": 160
},
{
"epoch": 0.05847571022189812,
"grad_norm": 8.208027839660645,
"learning_rate": 2.9239766081871343e-06,
"loss": 8.8888,
"step": 170
},
{
"epoch": 0.06191545788200978,
"grad_norm": 6.106681823730469,
"learning_rate": 3.0959752321981426e-06,
"loss": 8.1024,
"step": 180
},
{
"epoch": 0.06535520554212143,
"grad_norm": 3.8156211376190186,
"learning_rate": 3.2679738562091506e-06,
"loss": 7.5017,
"step": 190
},
{
"epoch": 0.06879495320223308,
"grad_norm": 3.4960334300994873,
"learning_rate": 3.4399724802201585e-06,
"loss": 7.0312,
"step": 200
},
{
"epoch": 0.07223470086234474,
"grad_norm": 3.1875991821289062,
"learning_rate": 3.611971104231166e-06,
"loss": 6.7019,
"step": 210
},
{
"epoch": 0.07567444852245639,
"grad_norm": 3.0836124420166016,
"learning_rate": 3.783969728242174e-06,
"loss": 6.3059,
"step": 220
},
{
"epoch": 0.07911419618256804,
"grad_norm": 2.8337793350219727,
"learning_rate": 3.955968352253183e-06,
"loss": 5.9064,
"step": 230
},
{
"epoch": 0.0825539438426797,
"grad_norm": 2.593134641647339,
"learning_rate": 4.12796697626419e-06,
"loss": 5.5292,
"step": 240
},
{
"epoch": 0.08599369150279136,
"grad_norm": 2.23457670211792,
"learning_rate": 4.299965600275198e-06,
"loss": 5.1508,
"step": 250
},
{
"epoch": 0.08943343916290301,
"grad_norm": 2.220684051513672,
"learning_rate": 4.471964224286205e-06,
"loss": 4.9064,
"step": 260
},
{
"epoch": 0.09287318682301467,
"grad_norm": 2.066509962081909,
"learning_rate": 4.643962848297214e-06,
"loss": 4.5587,
"step": 270
},
{
"epoch": 0.09631293448312632,
"grad_norm": 1.9317891597747803,
"learning_rate": 4.815961472308222e-06,
"loss": 4.2968,
"step": 280
},
{
"epoch": 0.09975268214323797,
"grad_norm": 1.852779746055603,
"learning_rate": 4.987960096319229e-06,
"loss": 3.9759,
"step": 290
},
{
"epoch": 0.10319242980334963,
"grad_norm": 1.7298812866210938,
"learning_rate": 5.159958720330237e-06,
"loss": 3.6811,
"step": 300
},
{
"epoch": 0.10663217746346128,
"grad_norm": 1.5647941827774048,
"learning_rate": 5.331957344341246e-06,
"loss": 3.4349,
"step": 310
},
{
"epoch": 0.11007192512357293,
"grad_norm": 1.423771619796753,
"learning_rate": 5.5039559683522536e-06,
"loss": 3.1899,
"step": 320
},
{
"epoch": 0.11351167278368458,
"grad_norm": 1.2160062789916992,
"learning_rate": 5.675954592363261e-06,
"loss": 3.0158,
"step": 330
},
{
"epoch": 0.11695142044379624,
"grad_norm": 1.162079095840454,
"learning_rate": 5.8479532163742686e-06,
"loss": 2.8798,
"step": 340
},
{
"epoch": 0.1203911681039079,
"grad_norm": 0.9763687252998352,
"learning_rate": 6.019951840385278e-06,
"loss": 2.7573,
"step": 350
},
{
"epoch": 0.12383091576401956,
"grad_norm": 0.9514108300209045,
"learning_rate": 6.191950464396285e-06,
"loss": 2.6355,
"step": 360
},
{
"epoch": 0.1272706634241312,
"grad_norm": 0.8850705623626709,
"learning_rate": 6.363949088407294e-06,
"loss": 2.5371,
"step": 370
},
{
"epoch": 0.13071041108424286,
"grad_norm": 0.8824434280395508,
"learning_rate": 6.535947712418301e-06,
"loss": 2.4404,
"step": 380
},
{
"epoch": 0.13415015874435451,
"grad_norm": 0.9136425256729126,
"learning_rate": 6.7079463364293095e-06,
"loss": 2.3809,
"step": 390
},
{
"epoch": 0.13758990640446617,
"grad_norm": 1.0447123050689697,
"learning_rate": 6.879944960440317e-06,
"loss": 2.3013,
"step": 400
},
{
"epoch": 0.14102965406457782,
"grad_norm": 1.0016720294952393,
"learning_rate": 7.0519435844513245e-06,
"loss": 2.2526,
"step": 410
},
{
"epoch": 0.14446940172468947,
"grad_norm": 0.9885135293006897,
"learning_rate": 7.223942208462332e-06,
"loss": 2.2099,
"step": 420
},
{
"epoch": 0.14790914938480113,
"grad_norm": 0.9515292048454285,
"learning_rate": 7.39594083247334e-06,
"loss": 2.1602,
"step": 430
},
{
"epoch": 0.15134889704491278,
"grad_norm": 0.8287838101387024,
"learning_rate": 7.567939456484348e-06,
"loss": 2.1251,
"step": 440
},
{
"epoch": 0.15478864470502443,
"grad_norm": 1.160489559173584,
"learning_rate": 7.739938080495356e-06,
"loss": 2.0836,
"step": 450
},
{
"epoch": 0.15822839236513608,
"grad_norm": 1.1019588708877563,
"learning_rate": 7.911936704506365e-06,
"loss": 2.0474,
"step": 460
},
{
"epoch": 0.16166814002524774,
"grad_norm": 0.9389374256134033,
"learning_rate": 8.083935328517373e-06,
"loss": 2.013,
"step": 470
},
{
"epoch": 0.1651078876853594,
"grad_norm": 0.9914370775222778,
"learning_rate": 8.25593395252838e-06,
"loss": 1.9829,
"step": 480
},
{
"epoch": 0.16854763534547107,
"grad_norm": 0.8827924132347107,
"learning_rate": 8.427932576539388e-06,
"loss": 1.9574,
"step": 490
},
{
"epoch": 0.17198738300558272,
"grad_norm": 1.0819416046142578,
"learning_rate": 8.599931200550395e-06,
"loss": 1.9333,
"step": 500
},
{
"epoch": 0.17542713066569438,
"grad_norm": 1.006886601448059,
"learning_rate": 8.771929824561403e-06,
"loss": 1.8845,
"step": 510
},
{
"epoch": 0.17886687832580603,
"grad_norm": 0.8748846054077148,
"learning_rate": 8.94392844857241e-06,
"loss": 1.8757,
"step": 520
},
{
"epoch": 0.18230662598591768,
"grad_norm": 0.9551517963409424,
"learning_rate": 9.11592707258342e-06,
"loss": 1.8478,
"step": 530
},
{
"epoch": 0.18574637364602933,
"grad_norm": 1.4100078344345093,
"learning_rate": 9.287925696594429e-06,
"loss": 1.8186,
"step": 540
},
{
"epoch": 0.189186121306141,
"grad_norm": 1.5030393600463867,
"learning_rate": 9.459924320605436e-06,
"loss": 1.7939,
"step": 550
},
{
"epoch": 0.19262586896625264,
"grad_norm": 1.0008896589279175,
"learning_rate": 9.631922944616444e-06,
"loss": 1.7711,
"step": 560
},
{
"epoch": 0.1960656166263643,
"grad_norm": 1.5640122890472412,
"learning_rate": 9.803921568627451e-06,
"loss": 1.7555,
"step": 570
},
{
"epoch": 0.19950536428647594,
"grad_norm": 1.041014313697815,
"learning_rate": 9.975920192638459e-06,
"loss": 1.7305,
"step": 580
},
{
"epoch": 0.2029451119465876,
"grad_norm": 1.2513008117675781,
"learning_rate": 1.0147918816649466e-05,
"loss": 1.7083,
"step": 590
},
{
"epoch": 0.20638485960669925,
"grad_norm": 1.2905975580215454,
"learning_rate": 1.0319917440660474e-05,
"loss": 1.7061,
"step": 600
},
{
"epoch": 0.2098246072668109,
"grad_norm": 1.0839684009552002,
"learning_rate": 1.0491916064671483e-05,
"loss": 1.6722,
"step": 610
},
{
"epoch": 0.21326435492692256,
"grad_norm": 1.2286721467971802,
"learning_rate": 1.0663914688682492e-05,
"loss": 1.6444,
"step": 620
},
{
"epoch": 0.2167041025870342,
"grad_norm": 1.3250325918197632,
"learning_rate": 1.08359133126935e-05,
"loss": 1.6229,
"step": 630
},
{
"epoch": 0.22014385024714586,
"grad_norm": 1.176580548286438,
"learning_rate": 1.1007911936704507e-05,
"loss": 1.6143,
"step": 640
},
{
"epoch": 0.22358359790725751,
"grad_norm": 1.4954670667648315,
"learning_rate": 1.1179910560715515e-05,
"loss": 1.597,
"step": 650
},
{
"epoch": 0.22702334556736917,
"grad_norm": 1.0406630039215088,
"learning_rate": 1.1351909184726522e-05,
"loss": 1.5877,
"step": 660
},
{
"epoch": 0.23046309322748082,
"grad_norm": 1.5156970024108887,
"learning_rate": 1.152390780873753e-05,
"loss": 1.5601,
"step": 670
},
{
"epoch": 0.23390284088759247,
"grad_norm": 1.3882994651794434,
"learning_rate": 1.1695906432748537e-05,
"loss": 1.5456,
"step": 680
},
{
"epoch": 0.23734258854770415,
"grad_norm": 1.5137856006622314,
"learning_rate": 1.1867905056759546e-05,
"loss": 1.5101,
"step": 690
},
{
"epoch": 0.2407823362078158,
"grad_norm": 1.2775577306747437,
"learning_rate": 1.2039903680770556e-05,
"loss": 1.5042,
"step": 700
},
{
"epoch": 0.24422208386792746,
"grad_norm": 1.3952692747116089,
"learning_rate": 1.2211902304781563e-05,
"loss": 1.4864,
"step": 710
},
{
"epoch": 0.2476618315280391,
"grad_norm": 1.5909626483917236,
"learning_rate": 1.238390092879257e-05,
"loss": 1.4703,
"step": 720
},
{
"epoch": 0.25110157918815074,
"grad_norm": 1.4731673002243042,
"learning_rate": 1.255589955280358e-05,
"loss": 1.4434,
"step": 730
},
{
"epoch": 0.2545413268482624,
"grad_norm": 1.370367169380188,
"learning_rate": 1.2727898176814587e-05,
"loss": 1.4522,
"step": 740
},
{
"epoch": 0.25798107450837404,
"grad_norm": 1.7441356182098389,
"learning_rate": 1.2899896800825595e-05,
"loss": 1.4124,
"step": 750
},
{
"epoch": 0.2614208221684857,
"grad_norm": 1.6607742309570312,
"learning_rate": 1.3071895424836602e-05,
"loss": 1.409,
"step": 760
},
{
"epoch": 0.26486056982859735,
"grad_norm": 1.423419713973999,
"learning_rate": 1.324389404884761e-05,
"loss": 1.3912,
"step": 770
},
{
"epoch": 0.26830031748870903,
"grad_norm": 1.6139755249023438,
"learning_rate": 1.3415892672858619e-05,
"loss": 1.3753,
"step": 780
},
{
"epoch": 0.2717400651488207,
"grad_norm": 1.9674487113952637,
"learning_rate": 1.3587891296869626e-05,
"loss": 1.3718,
"step": 790
},
{
"epoch": 0.27517981280893233,
"grad_norm": 2.2793004512786865,
"learning_rate": 1.3759889920880634e-05,
"loss": 1.3426,
"step": 800
},
{
"epoch": 0.278619560469044,
"grad_norm": 1.6088732481002808,
"learning_rate": 1.3931888544891641e-05,
"loss": 1.3336,
"step": 810
},
{
"epoch": 0.28205930812915564,
"grad_norm": 2.0331807136535645,
"learning_rate": 1.4103887168902649e-05,
"loss": 1.3224,
"step": 820
},
{
"epoch": 0.2854990557892673,
"grad_norm": 1.7430800199508667,
"learning_rate": 1.4275885792913656e-05,
"loss": 1.3212,
"step": 830
},
{
"epoch": 0.28893880344937894,
"grad_norm": 1.4452030658721924,
"learning_rate": 1.4447884416924664e-05,
"loss": 1.3141,
"step": 840
},
{
"epoch": 0.2923785511094906,
"grad_norm": 1.6565144062042236,
"learning_rate": 1.4619883040935673e-05,
"loss": 1.2885,
"step": 850
},
{
"epoch": 0.29581829876960225,
"grad_norm": 1.3976999521255493,
"learning_rate": 1.479188166494668e-05,
"loss": 1.2906,
"step": 860
},
{
"epoch": 0.29925804642971393,
"grad_norm": 2.229381561279297,
"learning_rate": 1.4963880288957688e-05,
"loss": 1.267,
"step": 870
},
{
"epoch": 0.30269779408982556,
"grad_norm": 1.6807219982147217,
"learning_rate": 1.5135878912968696e-05,
"loss": 1.2696,
"step": 880
},
{
"epoch": 0.30613754174993724,
"grad_norm": 1.2693103551864624,
"learning_rate": 1.5307877536979705e-05,
"loss": 1.2572,
"step": 890
},
{
"epoch": 0.30957728941004886,
"grad_norm": 1.9241502285003662,
"learning_rate": 1.5479876160990712e-05,
"loss": 1.2568,
"step": 900
},
{
"epoch": 0.31301703707016054,
"grad_norm": 1.660744547843933,
"learning_rate": 1.5651874785001723e-05,
"loss": 1.2521,
"step": 910
},
{
"epoch": 0.31645678473027217,
"grad_norm": 1.706275463104248,
"learning_rate": 1.582387340901273e-05,
"loss": 1.2357,
"step": 920
},
{
"epoch": 0.31989653239038385,
"grad_norm": 1.8540180921554565,
"learning_rate": 1.5995872033023738e-05,
"loss": 1.2167,
"step": 930
},
{
"epoch": 0.3233362800504955,
"grad_norm": 1.8119511604309082,
"learning_rate": 1.6167870657034746e-05,
"loss": 1.2544,
"step": 940
},
{
"epoch": 0.32677602771060715,
"grad_norm": 1.9702472686767578,
"learning_rate": 1.6339869281045753e-05,
"loss": 1.2094,
"step": 950
},
{
"epoch": 0.3302157753707188,
"grad_norm": 2.0417630672454834,
"learning_rate": 1.651186790505676e-05,
"loss": 1.1975,
"step": 960
},
{
"epoch": 0.33365552303083046,
"grad_norm": 1.4115933179855347,
"learning_rate": 1.6683866529067768e-05,
"loss": 1.1897,
"step": 970
},
{
"epoch": 0.33709527069094214,
"grad_norm": 2.0552473068237305,
"learning_rate": 1.6855865153078776e-05,
"loss": 1.2015,
"step": 980
},
{
"epoch": 0.34053501835105376,
"grad_norm": 2.383172035217285,
"learning_rate": 1.7027863777089783e-05,
"loss": 1.1849,
"step": 990
},
{
"epoch": 0.34397476601116544,
"grad_norm": 2.141005516052246,
"learning_rate": 1.719986240110079e-05,
"loss": 1.1892,
"step": 1000
},
{
"epoch": 0.34741451367127707,
"grad_norm": 2.242393732070923,
"learning_rate": 1.7371861025111798e-05,
"loss": 1.1812,
"step": 1010
},
{
"epoch": 0.35085426133138875,
"grad_norm": 2.2587263584136963,
"learning_rate": 1.7543859649122806e-05,
"loss": 1.1722,
"step": 1020
},
{
"epoch": 0.3542940089915004,
"grad_norm": 2.2541704177856445,
"learning_rate": 1.7715858273133813e-05,
"loss": 1.1677,
"step": 1030
},
{
"epoch": 0.35773375665161206,
"grad_norm": 1.5090018510818481,
"learning_rate": 1.788785689714482e-05,
"loss": 1.1496,
"step": 1040
},
{
"epoch": 0.3611735043117237,
"grad_norm": 2.1420912742614746,
"learning_rate": 1.805985552115583e-05,
"loss": 1.152,
"step": 1050
},
{
"epoch": 0.36461325197183536,
"grad_norm": 1.6955997943878174,
"learning_rate": 1.823185414516684e-05,
"loss": 1.1514,
"step": 1060
},
{
"epoch": 0.368052999631947,
"grad_norm": 2.603424549102783,
"learning_rate": 1.840385276917785e-05,
"loss": 1.1543,
"step": 1070
},
{
"epoch": 0.37149274729205867,
"grad_norm": 1.6074223518371582,
"learning_rate": 1.8575851393188857e-05,
"loss": 1.1632,
"step": 1080
},
{
"epoch": 0.3749324949521703,
"grad_norm": 2.813610553741455,
"learning_rate": 1.8747850017199865e-05,
"loss": 1.1383,
"step": 1090
},
{
"epoch": 0.378372242612282,
"grad_norm": 2.169351816177368,
"learning_rate": 1.8919848641210872e-05,
"loss": 1.1316,
"step": 1100
},
{
"epoch": 0.3818119902723936,
"grad_norm": 2.653705596923828,
"learning_rate": 1.909184726522188e-05,
"loss": 1.1433,
"step": 1110
},
{
"epoch": 0.3852517379325053,
"grad_norm": 3.17232084274292,
"learning_rate": 1.9263845889232888e-05,
"loss": 1.1385,
"step": 1120
},
{
"epoch": 0.3886914855926169,
"grad_norm": 2.2379255294799805,
"learning_rate": 1.9435844513243895e-05,
"loss": 1.1205,
"step": 1130
},
{
"epoch": 0.3921312332527286,
"grad_norm": 2.5751757621765137,
"learning_rate": 1.9607843137254903e-05,
"loss": 1.1125,
"step": 1140
},
{
"epoch": 0.3955709809128402,
"grad_norm": 1.6813164949417114,
"learning_rate": 1.977984176126591e-05,
"loss": 1.1147,
"step": 1150
},
{
"epoch": 0.3990107285729519,
"grad_norm": 2.8093101978302,
"learning_rate": 1.9951840385276918e-05,
"loss": 1.1057,
"step": 1160
},
{
"epoch": 0.40245047623306357,
"grad_norm": 2.059581995010376,
"learning_rate": 2.0123839009287925e-05,
"loss": 1.1037,
"step": 1170
},
{
"epoch": 0.4058902238931752,
"grad_norm": 1.9385766983032227,
"learning_rate": 2.0295837633298933e-05,
"loss": 1.0987,
"step": 1180
},
{
"epoch": 0.4093299715532869,
"grad_norm": 1.7843406200408936,
"learning_rate": 2.046783625730994e-05,
"loss": 1.0917,
"step": 1190
},
{
"epoch": 0.4127697192133985,
"grad_norm": 1.7654945850372314,
"learning_rate": 2.0639834881320948e-05,
"loss": 1.1,
"step": 1200
},
{
"epoch": 0.4162094668735102,
"grad_norm": 2.3094961643218994,
"learning_rate": 2.081183350533196e-05,
"loss": 1.0891,
"step": 1210
},
{
"epoch": 0.4196492145336218,
"grad_norm": 2.654541492462158,
"learning_rate": 2.0983832129342966e-05,
"loss": 1.0909,
"step": 1220
},
{
"epoch": 0.4230889621937335,
"grad_norm": 2.7142221927642822,
"learning_rate": 2.1155830753353977e-05,
"loss": 1.1068,
"step": 1230
},
{
"epoch": 0.4265287098538451,
"grad_norm": 1.7891792058944702,
"learning_rate": 2.1327829377364984e-05,
"loss": 1.076,
"step": 1240
},
{
"epoch": 0.4299684575139568,
"grad_norm": 1.8982404470443726,
"learning_rate": 2.1499828001375992e-05,
"loss": 1.0703,
"step": 1250
},
{
"epoch": 0.4334082051740684,
"grad_norm": 2.5560953617095947,
"learning_rate": 2.1671826625387e-05,
"loss": 1.0696,
"step": 1260
},
{
"epoch": 0.4368479528341801,
"grad_norm": 2.6159865856170654,
"learning_rate": 2.1843825249398007e-05,
"loss": 1.069,
"step": 1270
},
{
"epoch": 0.4402877004942917,
"grad_norm": 2.1036102771759033,
"learning_rate": 2.2015823873409014e-05,
"loss": 1.054,
"step": 1280
},
{
"epoch": 0.4437274481544034,
"grad_norm": 1.6638193130493164,
"learning_rate": 2.2187822497420022e-05,
"loss": 1.0554,
"step": 1290
},
{
"epoch": 0.44716719581451503,
"grad_norm": 1.7467694282531738,
"learning_rate": 2.235982112143103e-05,
"loss": 1.0356,
"step": 1300
},
{
"epoch": 0.4506069434746267,
"grad_norm": 1.9570752382278442,
"learning_rate": 2.2531819745442037e-05,
"loss": 1.0501,
"step": 1310
},
{
"epoch": 0.45404669113473833,
"grad_norm": 2.062814950942993,
"learning_rate": 2.2703818369453044e-05,
"loss": 1.041,
"step": 1320
},
{
"epoch": 0.45748643879485,
"grad_norm": 3.205482006072998,
"learning_rate": 2.2875816993464052e-05,
"loss": 1.0428,
"step": 1330
},
{
"epoch": 0.46092618645496164,
"grad_norm": 3.798652172088623,
"learning_rate": 2.304781561747506e-05,
"loss": 1.055,
"step": 1340
},
{
"epoch": 0.4643659341150733,
"grad_norm": 2.6361844539642334,
"learning_rate": 2.3219814241486067e-05,
"loss": 1.051,
"step": 1350
},
{
"epoch": 0.46780568177518494,
"grad_norm": 3.9871771335601807,
"learning_rate": 2.3391812865497074e-05,
"loss": 1.0576,
"step": 1360
},
{
"epoch": 0.4712454294352966,
"grad_norm": 2.9001245498657227,
"learning_rate": 2.3563811489508085e-05,
"loss": 1.0521,
"step": 1370
},
{
"epoch": 0.4746851770954083,
"grad_norm": 3.8362972736358643,
"learning_rate": 2.3735810113519093e-05,
"loss": 1.0392,
"step": 1380
},
{
"epoch": 0.47812492475551993,
"grad_norm": 3.222538709640503,
"learning_rate": 2.39078087375301e-05,
"loss": 1.0449,
"step": 1390
},
{
"epoch": 0.4815646724156316,
"grad_norm": 4.081332683563232,
"learning_rate": 2.407980736154111e-05,
"loss": 1.0402,
"step": 1400
},
{
"epoch": 0.48500442007574324,
"grad_norm": 3.8474042415618896,
"learning_rate": 2.425180598555212e-05,
"loss": 1.0199,
"step": 1410
},
{
"epoch": 0.4884441677358549,
"grad_norm": 3.6466848850250244,
"learning_rate": 2.4423804609563126e-05,
"loss": 1.0352,
"step": 1420
},
{
"epoch": 0.49188391539596654,
"grad_norm": 3.44462251663208,
"learning_rate": 2.4595803233574134e-05,
"loss": 1.0476,
"step": 1430
},
{
"epoch": 0.4953236630560782,
"grad_norm": 2.647538661956787,
"learning_rate": 2.476780185758514e-05,
"loss": 1.0094,
"step": 1440
},
{
"epoch": 0.49876341071618985,
"grad_norm": 2.3128774166107178,
"learning_rate": 2.493980048159615e-05,
"loss": 1.0291,
"step": 1450
},
{
"epoch": 0.5022031583763015,
"grad_norm": 3.0225725173950195,
"learning_rate": 2.511179910560716e-05,
"loss": 1.0321,
"step": 1460
},
{
"epoch": 0.5056429060364132,
"grad_norm": 2.3091073036193848,
"learning_rate": 2.5283797729618164e-05,
"loss": 1.0078,
"step": 1470
},
{
"epoch": 0.5090826536965248,
"grad_norm": 2.2101924419403076,
"learning_rate": 2.5455796353629174e-05,
"loss": 1.0316,
"step": 1480
},
{
"epoch": 0.5125224013566365,
"grad_norm": 2.288010597229004,
"learning_rate": 2.562779497764018e-05,
"loss": 1.0158,
"step": 1490
},
{
"epoch": 0.5159621490167481,
"grad_norm": 2.550391674041748,
"learning_rate": 2.579979360165119e-05,
"loss": 1.0131,
"step": 1500
}
],
"logging_steps": 10,
"max_steps": 14535,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 4.63721472e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
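
# A minimal sketch (not part of the checkpoint file above) of how this
# trainer_state.json might be inspected: it loads the file and plots the
# logged training loss against global step. The file path and the use of
# matplotlib are assumptions for illustration, not part of the upload.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed local path to the file above
    state = json.load(f)

# Each log_history entry (written every logging_steps=10 steps) carries
# step, epoch, loss, learning_rate, and grad_norm.
steps = [entry["step"] for entry in state["log_history"] if "loss" in entry]
losses = [entry["loss"] for entry in state["log_history"] if "loss" in entry]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(f"Loss over {state['global_step']} of {state['max_steps']} steps")
plt.show()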