WikiLinking-bi-gliner-base-5000 / trainer_state.json
BioMike
Upload folder using huggingface_hub
f97bde0 verified
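The JSON below is the Trainer state saved with this checkpoint: log_history records loss, grad_norm, and learning_rate every 10 optimizer steps up to global_step 5000 (epoch ≈ 0.0404). A minimal sketch for inspecting it locally, assuming the file has been downloaded next to the script as trainer_state.json (field names are taken from the file itself):

```python
import json

# Load the trainer state saved alongside the checkpoint.
with open("trainer_state.json") as f:
    state = json.load(f)

print("global_step:", state["global_step"])  # 5000
print("epoch:", state["epoch"])              # ~0.0404

# Each log_history entry is written every 10 steps and carries
# step, loss, grad_norm, and learning_rate.
for entry in state["log_history"][:5]:
    print(entry["step"], entry["loss"], entry["learning_rate"])
```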
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.040401102142066436,
"eval_steps": 500,
"global_step": 5000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 8.080220428413287e-05,
"grad_norm": 4661457.0,
"learning_rate": 4.040404040404041e-08,
"loss": 678012.1,
"step": 10
},
{
"epoch": 0.00016160440856826573,
"grad_norm": 9865970.0,
"learning_rate": 8.080808080808082e-08,
"loss": 841980.1,
"step": 20
},
{
"epoch": 0.00024240661285239863,
"grad_norm": 10027139.0,
"learning_rate": 1.2121212121212122e-07,
"loss": 816764.25,
"step": 30
},
{
"epoch": 0.00032320881713653147,
"grad_norm": 16910428.0,
"learning_rate": 1.6161616161616163e-07,
"loss": 961656.7,
"step": 40
},
{
"epoch": 0.00040401102142066436,
"grad_norm": 5068422.0,
"learning_rate": 2.0202020202020202e-07,
"loss": 679086.95,
"step": 50
},
{
"epoch": 0.00048481322570479725,
"grad_norm": 2293057.5,
"learning_rate": 2.4242424242424244e-07,
"loss": 949613.2,
"step": 60
},
{
"epoch": 0.0005656154299889301,
"grad_norm": 7849363.5,
"learning_rate": 2.8282828282828283e-07,
"loss": 859798.0,
"step": 70
},
{
"epoch": 0.0006464176342730629,
"grad_norm": 2669786.75,
"learning_rate": 3.2323232323232327e-07,
"loss": 863443.9,
"step": 80
},
{
"epoch": 0.0007272198385571959,
"grad_norm": 6021406.5,
"learning_rate": 3.6363636363636366e-07,
"loss": 734401.3,
"step": 90
},
{
"epoch": 0.0008080220428413287,
"grad_norm": 5000674.5,
"learning_rate": 4.0404040404040405e-07,
"loss": 526539.8,
"step": 100
},
{
"epoch": 0.0008888242471254616,
"grad_norm": 4362635.5,
"learning_rate": 4.444444444444445e-07,
"loss": 422810.45,
"step": 110
},
{
"epoch": 0.0009696264514095945,
"grad_norm": 5343355.0,
"learning_rate": 4.848484848484849e-07,
"loss": 490438.55,
"step": 120
},
{
"epoch": 0.0010504286556937273,
"grad_norm": 11366241.0,
"learning_rate": 5.252525252525253e-07,
"loss": 704809.3,
"step": 130
},
{
"epoch": 0.0011312308599778602,
"grad_norm": 5520147.0,
"learning_rate": 5.656565656565657e-07,
"loss": 526687.5,
"step": 140
},
{
"epoch": 0.001212033064261993,
"grad_norm": 3778741.0,
"learning_rate": 6.060606060606061e-07,
"loss": 322092.525,
"step": 150
},
{
"epoch": 0.0012928352685461259,
"grad_norm": 3378473.0,
"learning_rate": 6.464646464646465e-07,
"loss": 516098.85,
"step": 160
},
{
"epoch": 0.0013736374728302587,
"grad_norm": 4408531.0,
"learning_rate": 6.868686868686869e-07,
"loss": 410817.35,
"step": 170
},
{
"epoch": 0.0014544396771143918,
"grad_norm": 766378.375,
"learning_rate": 7.272727272727273e-07,
"loss": 197623.1625,
"step": 180
},
{
"epoch": 0.0015352418813985246,
"grad_norm": 5468394.5,
"learning_rate": 7.676767676767678e-07,
"loss": 238133.6,
"step": 190
},
{
"epoch": 0.0016160440856826574,
"grad_norm": 7980194.5,
"learning_rate": 8.080808080808081e-07,
"loss": 182498.725,
"step": 200
},
{
"epoch": 0.0016968462899667903,
"grad_norm": 737060.125,
"learning_rate": 8.484848484848486e-07,
"loss": 122880.075,
"step": 210
},
{
"epoch": 0.0017776484942509231,
"grad_norm": 476725.8125,
"learning_rate": 8.88888888888889e-07,
"loss": 100649.7563,
"step": 220
},
{
"epoch": 0.001858450698535056,
"grad_norm": 353791.875,
"learning_rate": 9.292929292929294e-07,
"loss": 58150.4563,
"step": 230
},
{
"epoch": 0.001939252902819189,
"grad_norm": 684820.5625,
"learning_rate": 9.696969696969698e-07,
"loss": 44040.125,
"step": 240
},
{
"epoch": 0.0020200551071033216,
"grad_norm": 228699.796875,
"learning_rate": 1.0101010101010103e-06,
"loss": 21505.6156,
"step": 250
},
{
"epoch": 0.0021008573113874547,
"grad_norm": 91799.625,
"learning_rate": 1.0505050505050506e-06,
"loss": 11209.5648,
"step": 260
},
{
"epoch": 0.0021816595156715873,
"grad_norm": 64077.56640625,
"learning_rate": 1.090909090909091e-06,
"loss": 5405.107,
"step": 270
},
{
"epoch": 0.0022624617199557204,
"grad_norm": 68140.5078125,
"learning_rate": 1.1313131313131313e-06,
"loss": 2509.3971,
"step": 280
},
{
"epoch": 0.0023432639242398534,
"grad_norm": 38560.15625,
"learning_rate": 1.1717171717171719e-06,
"loss": 1295.3724,
"step": 290
},
{
"epoch": 0.002424066128523986,
"grad_norm": 2864.86474609375,
"learning_rate": 1.2121212121212122e-06,
"loss": 923.2405,
"step": 300
},
{
"epoch": 0.002504868332808119,
"grad_norm": 2044.932861328125,
"learning_rate": 1.2525252525252527e-06,
"loss": 950.31,
"step": 310
},
{
"epoch": 0.0025856705370922517,
"grad_norm": 2294.186279296875,
"learning_rate": 1.292929292929293e-06,
"loss": 830.1033,
"step": 320
},
{
"epoch": 0.002666472741376385,
"grad_norm": 941.1571044921875,
"learning_rate": 1.3333333333333334e-06,
"loss": 711.3893,
"step": 330
},
{
"epoch": 0.0027472749456605174,
"grad_norm": 6470.8056640625,
"learning_rate": 1.3737373737373738e-06,
"loss": 746.1014,
"step": 340
},
{
"epoch": 0.0028280771499446505,
"grad_norm": 2412.6904296875,
"learning_rate": 1.4141414141414143e-06,
"loss": 774.8737,
"step": 350
},
{
"epoch": 0.0029088793542287835,
"grad_norm": 2536.60791015625,
"learning_rate": 1.4545454545454546e-06,
"loss": 832.4265,
"step": 360
},
{
"epoch": 0.002989681558512916,
"grad_norm": 5404.72265625,
"learning_rate": 1.4949494949494952e-06,
"loss": 676.5023,
"step": 370
},
{
"epoch": 0.003070483762797049,
"grad_norm": 1271.8477783203125,
"learning_rate": 1.5353535353535355e-06,
"loss": 518.433,
"step": 380
},
{
"epoch": 0.003151285967081182,
"grad_norm": 736.135009765625,
"learning_rate": 1.5757575757575759e-06,
"loss": 768.4572,
"step": 390
},
{
"epoch": 0.003232088171365315,
"grad_norm": 1238.1878662109375,
"learning_rate": 1.6161616161616162e-06,
"loss": 696.9558,
"step": 400
},
{
"epoch": 0.0033128903756494475,
"grad_norm": 1593.6436767578125,
"learning_rate": 1.6565656565656565e-06,
"loss": 670.7958,
"step": 410
},
{
"epoch": 0.0033936925799335806,
"grad_norm": 819.6708374023438,
"learning_rate": 1.6969696969696973e-06,
"loss": 581.9092,
"step": 420
},
{
"epoch": 0.0034744947842177136,
"grad_norm": 775.354248046875,
"learning_rate": 1.7373737373737376e-06,
"loss": 744.2987,
"step": 430
},
{
"epoch": 0.0035552969885018462,
"grad_norm": 744.1176147460938,
"learning_rate": 1.777777777777778e-06,
"loss": 665.0154,
"step": 440
},
{
"epoch": 0.0036360991927859793,
"grad_norm": 1123.35986328125,
"learning_rate": 1.818181818181818e-06,
"loss": 681.7526,
"step": 450
},
{
"epoch": 0.003716901397070112,
"grad_norm": 2373.274658203125,
"learning_rate": 1.8585858585858588e-06,
"loss": 654.0196,
"step": 460
},
{
"epoch": 0.003797703601354245,
"grad_norm": 1098.4403076171875,
"learning_rate": 1.8989898989898992e-06,
"loss": 657.3389,
"step": 470
},
{
"epoch": 0.003878505805638378,
"grad_norm": 1008.40966796875,
"learning_rate": 1.9393939393939395e-06,
"loss": 564.041,
"step": 480
},
{
"epoch": 0.003959308009922511,
"grad_norm": 1227.1414794921875,
"learning_rate": 1.9797979797979796e-06,
"loss": 614.5699,
"step": 490
},
{
"epoch": 0.004040110214206643,
"grad_norm": 57328.9609375,
"learning_rate": 2.0202020202020206e-06,
"loss": 596.2117,
"step": 500
},
{
"epoch": 0.004120912418490777,
"grad_norm": 2744.44873046875,
"learning_rate": 2.0606060606060607e-06,
"loss": 676.9374,
"step": 510
},
{
"epoch": 0.004201714622774909,
"grad_norm": 667.0958862304688,
"learning_rate": 2.1010101010101013e-06,
"loss": 812.1784,
"step": 520
},
{
"epoch": 0.004282516827059042,
"grad_norm": 1447.203857421875,
"learning_rate": 2.1414141414141414e-06,
"loss": 640.1749,
"step": 530
},
{
"epoch": 0.004363319031343175,
"grad_norm": 1186.56982421875,
"learning_rate": 2.181818181818182e-06,
"loss": 665.659,
"step": 540
},
{
"epoch": 0.004444121235627308,
"grad_norm": 1263.9395751953125,
"learning_rate": 2.2222222222222225e-06,
"loss": 696.9437,
"step": 550
},
{
"epoch": 0.004524923439911441,
"grad_norm": 1353.597412109375,
"learning_rate": 2.2626262626262626e-06,
"loss": 654.0818,
"step": 560
},
{
"epoch": 0.004605725644195573,
"grad_norm": 876.721923828125,
"learning_rate": 2.303030303030303e-06,
"loss": 683.512,
"step": 570
},
{
"epoch": 0.004686527848479707,
"grad_norm": 1866.14697265625,
"learning_rate": 2.3434343434343437e-06,
"loss": 727.845,
"step": 580
},
{
"epoch": 0.0047673300527638395,
"grad_norm": 667.1629028320312,
"learning_rate": 2.383838383838384e-06,
"loss": 625.2918,
"step": 590
},
{
"epoch": 0.004848132257047972,
"grad_norm": 614.8545532226562,
"learning_rate": 2.4242424242424244e-06,
"loss": 600.5201,
"step": 600
},
{
"epoch": 0.004928934461332105,
"grad_norm": 879.8363647460938,
"learning_rate": 2.4646464646464645e-06,
"loss": 509.5772,
"step": 610
},
{
"epoch": 0.005009736665616238,
"grad_norm": 1344.7303466796875,
"learning_rate": 2.5050505050505055e-06,
"loss": 639.9966,
"step": 620
},
{
"epoch": 0.005090538869900371,
"grad_norm": 1223.520751953125,
"learning_rate": 2.5454545454545456e-06,
"loss": 706.7377,
"step": 630
},
{
"epoch": 0.0051713410741845035,
"grad_norm": 1871.1324462890625,
"learning_rate": 2.585858585858586e-06,
"loss": 712.2218,
"step": 640
},
{
"epoch": 0.005252143278468637,
"grad_norm": 804.082763671875,
"learning_rate": 2.6262626262626263e-06,
"loss": 666.623,
"step": 650
},
{
"epoch": 0.00533294548275277,
"grad_norm": 10994.2314453125,
"learning_rate": 2.666666666666667e-06,
"loss": 726.5861,
"step": 660
},
{
"epoch": 0.005413747687036902,
"grad_norm": 892.67919921875,
"learning_rate": 2.7070707070707074e-06,
"loss": 596.617,
"step": 670
},
{
"epoch": 0.005494549891321035,
"grad_norm": 681.8887939453125,
"learning_rate": 2.7474747474747475e-06,
"loss": 496.8398,
"step": 680
},
{
"epoch": 0.005575352095605168,
"grad_norm": 1190.68310546875,
"learning_rate": 2.787878787878788e-06,
"loss": 684.7522,
"step": 690
},
{
"epoch": 0.005656154299889301,
"grad_norm": 1649.9376220703125,
"learning_rate": 2.8282828282828286e-06,
"loss": 717.8954,
"step": 700
},
{
"epoch": 0.0057369565041734336,
"grad_norm": 2140.240234375,
"learning_rate": 2.8686868686868687e-06,
"loss": 633.3918,
"step": 710
},
{
"epoch": 0.005817758708457567,
"grad_norm": 1765.3192138671875,
"learning_rate": 2.9090909090909093e-06,
"loss": 656.2145,
"step": 720
},
{
"epoch": 0.0058985609127417,
"grad_norm": 2076.052001953125,
"learning_rate": 2.9494949494949494e-06,
"loss": 620.9487,
"step": 730
},
{
"epoch": 0.005979363117025832,
"grad_norm": 817.6283569335938,
"learning_rate": 2.9898989898989904e-06,
"loss": 427.1178,
"step": 740
},
{
"epoch": 0.006060165321309965,
"grad_norm": 1084.86767578125,
"learning_rate": 3.0303030303030305e-06,
"loss": 560.9343,
"step": 750
},
{
"epoch": 0.006140967525594098,
"grad_norm": 786.8311767578125,
"learning_rate": 3.070707070707071e-06,
"loss": 520.7236,
"step": 760
},
{
"epoch": 0.006221769729878231,
"grad_norm": 4558.533203125,
"learning_rate": 3.111111111111111e-06,
"loss": 555.48,
"step": 770
},
{
"epoch": 0.006302571934162364,
"grad_norm": 785.0099487304688,
"learning_rate": 3.1515151515151517e-06,
"loss": 514.0267,
"step": 780
},
{
"epoch": 0.006383374138446497,
"grad_norm": 1063.466796875,
"learning_rate": 3.191919191919192e-06,
"loss": 592.0211,
"step": 790
},
{
"epoch": 0.00646417634273063,
"grad_norm": 2228.42041015625,
"learning_rate": 3.2323232323232324e-06,
"loss": 613.0331,
"step": 800
},
{
"epoch": 0.006544978547014762,
"grad_norm": 1026.750732421875,
"learning_rate": 3.2727272727272733e-06,
"loss": 639.7108,
"step": 810
},
{
"epoch": 0.006625780751298895,
"grad_norm": 2451.453369140625,
"learning_rate": 3.313131313131313e-06,
"loss": 659.9323,
"step": 820
},
{
"epoch": 0.0067065829555830285,
"grad_norm": 1183.8045654296875,
"learning_rate": 3.3535353535353536e-06,
"loss": 601.1871,
"step": 830
},
{
"epoch": 0.006787385159867161,
"grad_norm": 1861.18701171875,
"learning_rate": 3.3939393939393946e-06,
"loss": 589.3404,
"step": 840
},
{
"epoch": 0.006868187364151294,
"grad_norm": 1340.5020751953125,
"learning_rate": 3.4343434343434343e-06,
"loss": 588.7653,
"step": 850
},
{
"epoch": 0.006948989568435427,
"grad_norm": 8932.84375,
"learning_rate": 3.4747474747474752e-06,
"loss": 667.0873,
"step": 860
},
{
"epoch": 0.00702979177271956,
"grad_norm": 1353.702392578125,
"learning_rate": 3.515151515151515e-06,
"loss": 574.4604,
"step": 870
},
{
"epoch": 0.0071105939770036925,
"grad_norm": 1281.541748046875,
"learning_rate": 3.555555555555556e-06,
"loss": 683.6188,
"step": 880
},
{
"epoch": 0.007191396181287825,
"grad_norm": 3347.4111328125,
"learning_rate": 3.5959595959595965e-06,
"loss": 591.1179,
"step": 890
},
{
"epoch": 0.007272198385571959,
"grad_norm": 832.118896484375,
"learning_rate": 3.636363636363636e-06,
"loss": 509.1054,
"step": 900
},
{
"epoch": 0.007353000589856091,
"grad_norm": 3215.1875,
"learning_rate": 3.676767676767677e-06,
"loss": 634.5958,
"step": 910
},
{
"epoch": 0.007433802794140224,
"grad_norm": 1072.3865966796875,
"learning_rate": 3.7171717171717177e-06,
"loss": 525.3159,
"step": 920
},
{
"epoch": 0.007514604998424357,
"grad_norm": 1002.2363891601562,
"learning_rate": 3.757575757575758e-06,
"loss": 659.7474,
"step": 930
},
{
"epoch": 0.00759540720270849,
"grad_norm": 6355.693359375,
"learning_rate": 3.7979797979797984e-06,
"loss": 636.0396,
"step": 940
},
{
"epoch": 0.007676209406992623,
"grad_norm": 1635.6080322265625,
"learning_rate": 3.8383838383838385e-06,
"loss": 624.7128,
"step": 950
},
{
"epoch": 0.007757011611276756,
"grad_norm": 1284.3531494140625,
"learning_rate": 3.878787878787879e-06,
"loss": 637.0934,
"step": 960
},
{
"epoch": 0.007837813815560889,
"grad_norm": 2921.176025390625,
"learning_rate": 3.9191919191919196e-06,
"loss": 648.8942,
"step": 970
},
{
"epoch": 0.007918616019845021,
"grad_norm": 845.0542602539062,
"learning_rate": 3.959595959595959e-06,
"loss": 536.0546,
"step": 980
},
{
"epoch": 0.007999418224129154,
"grad_norm": 986.8812866210938,
"learning_rate": 4.000000000000001e-06,
"loss": 643.056,
"step": 990
},
{
"epoch": 0.008080220428413287,
"grad_norm": 748.7238159179688,
"learning_rate": 4.040404040404041e-06,
"loss": 587.7603,
"step": 1000
},
{
"epoch": 0.00816102263269742,
"grad_norm": 4340.18017578125,
"learning_rate": 4.080808080808081e-06,
"loss": 545.2501,
"step": 1010
},
{
"epoch": 0.008241824836981554,
"grad_norm": 1614.0966796875,
"learning_rate": 4.1212121212121215e-06,
"loss": 652.8924,
"step": 1020
},
{
"epoch": 0.008322627041265686,
"grad_norm": 10709.900390625,
"learning_rate": 4.161616161616161e-06,
"loss": 598.342,
"step": 1030
},
{
"epoch": 0.008403429245549819,
"grad_norm": 870.67578125,
"learning_rate": 4.2020202020202026e-06,
"loss": 612.1897,
"step": 1040
},
{
"epoch": 0.008484231449833951,
"grad_norm": 686.4441528320312,
"learning_rate": 4.242424242424243e-06,
"loss": 564.3604,
"step": 1050
},
{
"epoch": 0.008565033654118084,
"grad_norm": 800.3753051757812,
"learning_rate": 4.282828282828283e-06,
"loss": 515.465,
"step": 1060
},
{
"epoch": 0.008645835858402217,
"grad_norm": 1612.2467041015625,
"learning_rate": 4.323232323232323e-06,
"loss": 615.4043,
"step": 1070
},
{
"epoch": 0.00872663806268635,
"grad_norm": 1488.6268310546875,
"learning_rate": 4.363636363636364e-06,
"loss": 584.7924,
"step": 1080
},
{
"epoch": 0.008807440266970484,
"grad_norm": 1085.609619140625,
"learning_rate": 4.4040404040404044e-06,
"loss": 568.4938,
"step": 1090
},
{
"epoch": 0.008888242471254616,
"grad_norm": 1548.931884765625,
"learning_rate": 4.444444444444445e-06,
"loss": 515.5535,
"step": 1100
},
{
"epoch": 0.008969044675538749,
"grad_norm": 1010.5916137695312,
"learning_rate": 4.484848484848485e-06,
"loss": 496.3722,
"step": 1110
},
{
"epoch": 0.009049846879822881,
"grad_norm": 1959.3551025390625,
"learning_rate": 4.525252525252525e-06,
"loss": 697.1435,
"step": 1120
},
{
"epoch": 0.009130649084107014,
"grad_norm": 2370.032470703125,
"learning_rate": 4.565656565656566e-06,
"loss": 541.7345,
"step": 1130
},
{
"epoch": 0.009211451288391147,
"grad_norm": 1848.2596435546875,
"learning_rate": 4.606060606060606e-06,
"loss": 539.6271,
"step": 1140
},
{
"epoch": 0.00929225349267528,
"grad_norm": 2245.12548828125,
"learning_rate": 4.646464646464647e-06,
"loss": 485.2028,
"step": 1150
},
{
"epoch": 0.009373055696959414,
"grad_norm": 1303.244140625,
"learning_rate": 4.6868686868686874e-06,
"loss": 552.4869,
"step": 1160
},
{
"epoch": 0.009453857901243546,
"grad_norm": 813.8889770507812,
"learning_rate": 4.727272727272727e-06,
"loss": 566.426,
"step": 1170
},
{
"epoch": 0.009534660105527679,
"grad_norm": 2155.474609375,
"learning_rate": 4.767676767676768e-06,
"loss": 694.8829,
"step": 1180
},
{
"epoch": 0.009615462309811812,
"grad_norm": 3313.629150390625,
"learning_rate": 4.808080808080808e-06,
"loss": 541.8082,
"step": 1190
},
{
"epoch": 0.009696264514095944,
"grad_norm": 830.4761352539062,
"learning_rate": 4.848484848484849e-06,
"loss": 633.2839,
"step": 1200
},
{
"epoch": 0.009777066718380077,
"grad_norm": 2314.525634765625,
"learning_rate": 4.888888888888889e-06,
"loss": 465.8495,
"step": 1210
},
{
"epoch": 0.00985786892266421,
"grad_norm": 1821.2579345703125,
"learning_rate": 4.929292929292929e-06,
"loss": 543.8752,
"step": 1220
},
{
"epoch": 0.009938671126948344,
"grad_norm": 471.08380126953125,
"learning_rate": 4.96969696969697e-06,
"loss": 405.759,
"step": 1230
},
{
"epoch": 0.010019473331232476,
"grad_norm": 1476.8787841796875,
"learning_rate": 5.010101010101011e-06,
"loss": 617.1836,
"step": 1240
},
{
"epoch": 0.010100275535516609,
"grad_norm": 667.0234985351562,
"learning_rate": 5.050505050505051e-06,
"loss": 513.511,
"step": 1250
},
{
"epoch": 0.010181077739800742,
"grad_norm": 613.2576904296875,
"learning_rate": 5.090909090909091e-06,
"loss": 468.2697,
"step": 1260
},
{
"epoch": 0.010261879944084874,
"grad_norm": 1321.9991455078125,
"learning_rate": 5.131313131313131e-06,
"loss": 476.6512,
"step": 1270
},
{
"epoch": 0.010342682148369007,
"grad_norm": 840.0568237304688,
"learning_rate": 5.171717171717172e-06,
"loss": 607.1966,
"step": 1280
},
{
"epoch": 0.01042348435265314,
"grad_norm": 1686.292724609375,
"learning_rate": 5.212121212121213e-06,
"loss": 538.6932,
"step": 1290
},
{
"epoch": 0.010504286556937274,
"grad_norm": 2623.5302734375,
"learning_rate": 5.2525252525252526e-06,
"loss": 511.9114,
"step": 1300
},
{
"epoch": 0.010585088761221407,
"grad_norm": 1300.63671875,
"learning_rate": 5.292929292929293e-06,
"loss": 573.5399,
"step": 1310
},
{
"epoch": 0.01066589096550554,
"grad_norm": 828.8121948242188,
"learning_rate": 5.333333333333334e-06,
"loss": 617.9385,
"step": 1320
},
{
"epoch": 0.010746693169789672,
"grad_norm": 965.5888671875,
"learning_rate": 5.373737373737374e-06,
"loss": 428.5168,
"step": 1330
},
{
"epoch": 0.010827495374073804,
"grad_norm": 1123.3818359375,
"learning_rate": 5.414141414141415e-06,
"loss": 582.797,
"step": 1340
},
{
"epoch": 0.010908297578357937,
"grad_norm": 4098.775390625,
"learning_rate": 5.4545454545454545e-06,
"loss": 633.2868,
"step": 1350
},
{
"epoch": 0.01098909978264207,
"grad_norm": 792.64306640625,
"learning_rate": 5.494949494949495e-06,
"loss": 688.7412,
"step": 1360
},
{
"epoch": 0.011069901986926204,
"grad_norm": 1111.2113037109375,
"learning_rate": 5.5353535353535355e-06,
"loss": 473.5282,
"step": 1370
},
{
"epoch": 0.011150704191210337,
"grad_norm": 5420.9765625,
"learning_rate": 5.575757575757576e-06,
"loss": 516.3693,
"step": 1380
},
{
"epoch": 0.01123150639549447,
"grad_norm": 1263.883056640625,
"learning_rate": 5.616161616161617e-06,
"loss": 457.2747,
"step": 1390
},
{
"epoch": 0.011312308599778602,
"grad_norm": 944.7489624023438,
"learning_rate": 5.656565656565657e-06,
"loss": 539.8824,
"step": 1400
},
{
"epoch": 0.011393110804062734,
"grad_norm": 562.0636596679688,
"learning_rate": 5.696969696969697e-06,
"loss": 455.684,
"step": 1410
},
{
"epoch": 0.011473913008346867,
"grad_norm": 1293.833984375,
"learning_rate": 5.7373737373737374e-06,
"loss": 527.1053,
"step": 1420
},
{
"epoch": 0.011554715212631,
"grad_norm": 1857.3572998046875,
"learning_rate": 5.777777777777778e-06,
"loss": 519.1355,
"step": 1430
},
{
"epoch": 0.011635517416915134,
"grad_norm": 1110.5677490234375,
"learning_rate": 5.8181818181818185e-06,
"loss": 470.4913,
"step": 1440
},
{
"epoch": 0.011716319621199267,
"grad_norm": 1656.9898681640625,
"learning_rate": 5.858585858585859e-06,
"loss": 570.9559,
"step": 1450
},
{
"epoch": 0.0117971218254834,
"grad_norm": 830.130126953125,
"learning_rate": 5.898989898989899e-06,
"loss": 551.6251,
"step": 1460
},
{
"epoch": 0.011877924029767532,
"grad_norm": 718.1753540039062,
"learning_rate": 5.93939393939394e-06,
"loss": 495.0048,
"step": 1470
},
{
"epoch": 0.011958726234051665,
"grad_norm": 2903.817138671875,
"learning_rate": 5.979797979797981e-06,
"loss": 742.8938,
"step": 1480
},
{
"epoch": 0.012039528438335797,
"grad_norm": 893.6466674804688,
"learning_rate": 6.0202020202020204e-06,
"loss": 542.4396,
"step": 1490
},
{
"epoch": 0.01212033064261993,
"grad_norm": 1576.75390625,
"learning_rate": 6.060606060606061e-06,
"loss": 513.3051,
"step": 1500
},
{
"epoch": 0.012201132846904064,
"grad_norm": 1925.69140625,
"learning_rate": 6.101010101010101e-06,
"loss": 450.3941,
"step": 1510
},
{
"epoch": 0.012281935051188197,
"grad_norm": 1998.0458984375,
"learning_rate": 6.141414141414142e-06,
"loss": 635.4773,
"step": 1520
},
{
"epoch": 0.01236273725547233,
"grad_norm": 1258.9659423828125,
"learning_rate": 6.181818181818183e-06,
"loss": 595.3451,
"step": 1530
},
{
"epoch": 0.012443539459756462,
"grad_norm": 1726.843017578125,
"learning_rate": 6.222222222222222e-06,
"loss": 556.3297,
"step": 1540
},
{
"epoch": 0.012524341664040595,
"grad_norm": 2486.63134765625,
"learning_rate": 6.262626262626263e-06,
"loss": 647.7714,
"step": 1550
},
{
"epoch": 0.012605143868324727,
"grad_norm": 1644.4110107421875,
"learning_rate": 6.303030303030303e-06,
"loss": 611.4104,
"step": 1560
},
{
"epoch": 0.01268594607260886,
"grad_norm": 2169.43408203125,
"learning_rate": 6.343434343434344e-06,
"loss": 445.9718,
"step": 1570
},
{
"epoch": 0.012766748276892994,
"grad_norm": 1554.9425048828125,
"learning_rate": 6.383838383838384e-06,
"loss": 561.5079,
"step": 1580
},
{
"epoch": 0.012847550481177127,
"grad_norm": 891.0722045898438,
"learning_rate": 6.424242424242424e-06,
"loss": 510.2931,
"step": 1590
},
{
"epoch": 0.01292835268546126,
"grad_norm": 22828.455078125,
"learning_rate": 6.464646464646465e-06,
"loss": 552.862,
"step": 1600
},
{
"epoch": 0.013009154889745392,
"grad_norm": 722.2283325195312,
"learning_rate": 6.505050505050505e-06,
"loss": 512.3786,
"step": 1610
},
{
"epoch": 0.013089957094029525,
"grad_norm": 1105.0716552734375,
"learning_rate": 6.545454545454547e-06,
"loss": 532.3522,
"step": 1620
},
{
"epoch": 0.013170759298313657,
"grad_norm": 1146.0550537109375,
"learning_rate": 6.5858585858585856e-06,
"loss": 647.0639,
"step": 1630
},
{
"epoch": 0.01325156150259779,
"grad_norm": 2632.366943359375,
"learning_rate": 6.626262626262626e-06,
"loss": 613.0408,
"step": 1640
},
{
"epoch": 0.013332363706881924,
"grad_norm": 1236.950439453125,
"learning_rate": 6.666666666666667e-06,
"loss": 614.0946,
"step": 1650
},
{
"epoch": 0.013413165911166057,
"grad_norm": 973.5294799804688,
"learning_rate": 6.707070707070707e-06,
"loss": 383.6397,
"step": 1660
},
{
"epoch": 0.01349396811545019,
"grad_norm": 1101.2989501953125,
"learning_rate": 6.747474747474749e-06,
"loss": 556.5678,
"step": 1670
},
{
"epoch": 0.013574770319734322,
"grad_norm": 694.1087646484375,
"learning_rate": 6.787878787878789e-06,
"loss": 415.0727,
"step": 1680
},
{
"epoch": 0.013655572524018455,
"grad_norm": 1031.21240234375,
"learning_rate": 6.828282828282828e-06,
"loss": 508.7032,
"step": 1690
},
{
"epoch": 0.013736374728302587,
"grad_norm": 734.2860717773438,
"learning_rate": 6.8686868686868685e-06,
"loss": 482.0532,
"step": 1700
},
{
"epoch": 0.01381717693258672,
"grad_norm": 879.1790771484375,
"learning_rate": 6.909090909090909e-06,
"loss": 655.4877,
"step": 1710
},
{
"epoch": 0.013897979136870854,
"grad_norm": 1699.49560546875,
"learning_rate": 6.9494949494949505e-06,
"loss": 400.7212,
"step": 1720
},
{
"epoch": 0.013978781341154987,
"grad_norm": 1901.2413330078125,
"learning_rate": 6.989898989898991e-06,
"loss": 496.4101,
"step": 1730
},
{
"epoch": 0.01405958354543912,
"grad_norm": 1368.836669921875,
"learning_rate": 7.03030303030303e-06,
"loss": 520.5295,
"step": 1740
},
{
"epoch": 0.014140385749723252,
"grad_norm": 1587.8297119140625,
"learning_rate": 7.0707070707070704e-06,
"loss": 397.7875,
"step": 1750
},
{
"epoch": 0.014221187954007385,
"grad_norm": 1054.773681640625,
"learning_rate": 7.111111111111112e-06,
"loss": 456.9818,
"step": 1760
},
{
"epoch": 0.014301990158291518,
"grad_norm": 3368.4013671875,
"learning_rate": 7.151515151515152e-06,
"loss": 397.1987,
"step": 1770
},
{
"epoch": 0.01438279236257565,
"grad_norm": 1371.4473876953125,
"learning_rate": 7.191919191919193e-06,
"loss": 433.1569,
"step": 1780
},
{
"epoch": 0.014463594566859785,
"grad_norm": 1193.6456298828125,
"learning_rate": 7.232323232323232e-06,
"loss": 368.5404,
"step": 1790
},
{
"epoch": 0.014544396771143917,
"grad_norm": 976.5448608398438,
"learning_rate": 7.272727272727272e-06,
"loss": 427.9085,
"step": 1800
},
{
"epoch": 0.01462519897542805,
"grad_norm": 665.8095092773438,
"learning_rate": 7.313131313131314e-06,
"loss": 461.1592,
"step": 1810
},
{
"epoch": 0.014706001179712182,
"grad_norm": 1352.773681640625,
"learning_rate": 7.353535353535354e-06,
"loss": 421.6953,
"step": 1820
},
{
"epoch": 0.014786803383996315,
"grad_norm": 1284.1273193359375,
"learning_rate": 7.393939393939395e-06,
"loss": 486.5889,
"step": 1830
},
{
"epoch": 0.014867605588280448,
"grad_norm": 3249.41650390625,
"learning_rate": 7.434343434343435e-06,
"loss": 534.9857,
"step": 1840
},
{
"epoch": 0.01494840779256458,
"grad_norm": 1162.9168701171875,
"learning_rate": 7.474747474747475e-06,
"loss": 684.8136,
"step": 1850
},
{
"epoch": 0.015029209996848715,
"grad_norm": 2700.0771484375,
"learning_rate": 7.515151515151516e-06,
"loss": 438.9466,
"step": 1860
},
{
"epoch": 0.015110012201132847,
"grad_norm": 1472.51171875,
"learning_rate": 7.555555555555556e-06,
"loss": 541.1812,
"step": 1870
},
{
"epoch": 0.01519081440541698,
"grad_norm": 763.5363159179688,
"learning_rate": 7.595959595959597e-06,
"loss": 574.8496,
"step": 1880
},
{
"epoch": 0.015271616609701113,
"grad_norm": 1097.047607421875,
"learning_rate": 7.636363636363638e-06,
"loss": 495.2874,
"step": 1890
},
{
"epoch": 0.015352418813985245,
"grad_norm": 1086.6981201171875,
"learning_rate": 7.676767676767677e-06,
"loss": 494.5146,
"step": 1900
},
{
"epoch": 0.015433221018269378,
"grad_norm": 1003.9575805664062,
"learning_rate": 7.717171717171717e-06,
"loss": 493.7404,
"step": 1910
},
{
"epoch": 0.015514023222553512,
"grad_norm": 1008.3009643554688,
"learning_rate": 7.757575757575758e-06,
"loss": 507.6875,
"step": 1920
},
{
"epoch": 0.015594825426837645,
"grad_norm": 2552.92626953125,
"learning_rate": 7.797979797979799e-06,
"loss": 420.8823,
"step": 1930
},
{
"epoch": 0.015675627631121777,
"grad_norm": 735.5524291992188,
"learning_rate": 7.838383838383839e-06,
"loss": 519.3292,
"step": 1940
},
{
"epoch": 0.01575642983540591,
"grad_norm": 1559.79296875,
"learning_rate": 7.878787878787878e-06,
"loss": 468.1747,
"step": 1950
},
{
"epoch": 0.015837232039690043,
"grad_norm": 1734.436767578125,
"learning_rate": 7.919191919191919e-06,
"loss": 417.3558,
"step": 1960
},
{
"epoch": 0.015918034243974175,
"grad_norm": 2985.763671875,
"learning_rate": 7.959595959595959e-06,
"loss": 427.185,
"step": 1970
},
{
"epoch": 0.015998836448258308,
"grad_norm": 1284.0130615234375,
"learning_rate": 8.000000000000001e-06,
"loss": 523.1269,
"step": 1980
},
{
"epoch": 0.01607963865254244,
"grad_norm": 1079.0145263671875,
"learning_rate": 8.040404040404042e-06,
"loss": 497.3142,
"step": 1990
},
{
"epoch": 0.016160440856826573,
"grad_norm": 1260.9786376953125,
"learning_rate": 8.080808080808082e-06,
"loss": 495.9584,
"step": 2000
},
{
"epoch": 0.016241243061110706,
"grad_norm": 4998.38525390625,
"learning_rate": 8.121212121212121e-06,
"loss": 425.5371,
"step": 2010
},
{
"epoch": 0.01632204526539484,
"grad_norm": 1627.860595703125,
"learning_rate": 8.161616161616162e-06,
"loss": 547.4274,
"step": 2020
},
{
"epoch": 0.016402847469678974,
"grad_norm": 1790.0679931640625,
"learning_rate": 8.202020202020202e-06,
"loss": 519.9649,
"step": 2030
},
{
"epoch": 0.016483649673963107,
"grad_norm": 1015.8907470703125,
"learning_rate": 8.242424242424243e-06,
"loss": 439.1214,
"step": 2040
},
{
"epoch": 0.01656445187824724,
"grad_norm": 1249.7930908203125,
"learning_rate": 8.282828282828283e-06,
"loss": 493.0195,
"step": 2050
},
{
"epoch": 0.016645254082531372,
"grad_norm": 7022.84423828125,
"learning_rate": 8.323232323232322e-06,
"loss": 497.0867,
"step": 2060
},
{
"epoch": 0.016726056286815505,
"grad_norm": 985.5494384765625,
"learning_rate": 8.363636363636365e-06,
"loss": 464.8873,
"step": 2070
},
{
"epoch": 0.016806858491099638,
"grad_norm": 977.4537353515625,
"learning_rate": 8.404040404040405e-06,
"loss": 548.6022,
"step": 2080
},
{
"epoch": 0.01688766069538377,
"grad_norm": 1503.036376953125,
"learning_rate": 8.444444444444446e-06,
"loss": 535.7155,
"step": 2090
},
{
"epoch": 0.016968462899667903,
"grad_norm": 876.1661987304688,
"learning_rate": 8.484848484848486e-06,
"loss": 439.9817,
"step": 2100
},
{
"epoch": 0.017049265103952035,
"grad_norm": 631.0904541015625,
"learning_rate": 8.525252525252525e-06,
"loss": 432.0362,
"step": 2110
},
{
"epoch": 0.017130067308236168,
"grad_norm": 1009.0225830078125,
"learning_rate": 8.565656565656566e-06,
"loss": 560.3087,
"step": 2120
},
{
"epoch": 0.0172108695125203,
"grad_norm": 893.6896362304688,
"learning_rate": 8.606060606060606e-06,
"loss": 574.9824,
"step": 2130
},
{
"epoch": 0.017291671716804433,
"grad_norm": 1586.4207763671875,
"learning_rate": 8.646464646464647e-06,
"loss": 541.0257,
"step": 2140
},
{
"epoch": 0.017372473921088566,
"grad_norm": 1161.7142333984375,
"learning_rate": 8.686868686868687e-06,
"loss": 360.7528,
"step": 2150
},
{
"epoch": 0.0174532761253727,
"grad_norm": 1364.39697265625,
"learning_rate": 8.727272727272728e-06,
"loss": 511.5766,
"step": 2160
},
{
"epoch": 0.017534078329656835,
"grad_norm": 1430.6226806640625,
"learning_rate": 8.767676767676768e-06,
"loss": 374.0022,
"step": 2170
},
{
"epoch": 0.017614880533940967,
"grad_norm": 944.2110595703125,
"learning_rate": 8.808080808080809e-06,
"loss": 385.8277,
"step": 2180
},
{
"epoch": 0.0176956827382251,
"grad_norm": 1198.7069091796875,
"learning_rate": 8.84848484848485e-06,
"loss": 459.1448,
"step": 2190
},
{
"epoch": 0.017776484942509233,
"grad_norm": 969.3467407226562,
"learning_rate": 8.88888888888889e-06,
"loss": 472.9384,
"step": 2200
},
{
"epoch": 0.017857287146793365,
"grad_norm": 1518.4356689453125,
"learning_rate": 8.92929292929293e-06,
"loss": 389.709,
"step": 2210
},
{
"epoch": 0.017938089351077498,
"grad_norm": 1016.8683471679688,
"learning_rate": 8.96969696969697e-06,
"loss": 416.4938,
"step": 2220
},
{
"epoch": 0.01801889155536163,
"grad_norm": 1106.466796875,
"learning_rate": 9.01010101010101e-06,
"loss": 430.7052,
"step": 2230
},
{
"epoch": 0.018099693759645763,
"grad_norm": 959.3176879882812,
"learning_rate": 9.05050505050505e-06,
"loss": 439.8072,
"step": 2240
},
{
"epoch": 0.018180495963929896,
"grad_norm": 892.5261840820312,
"learning_rate": 9.090909090909091e-06,
"loss": 433.3271,
"step": 2250
},
{
"epoch": 0.018261298168214028,
"grad_norm": 2410.990234375,
"learning_rate": 9.131313131313132e-06,
"loss": 393.4176,
"step": 2260
},
{
"epoch": 0.01834210037249816,
"grad_norm": 1493.900390625,
"learning_rate": 9.171717171717172e-06,
"loss": 412.9764,
"step": 2270
},
{
"epoch": 0.018422902576782293,
"grad_norm": 2805.1650390625,
"learning_rate": 9.212121212121213e-06,
"loss": 625.3552,
"step": 2280
},
{
"epoch": 0.018503704781066426,
"grad_norm": 1327.8782958984375,
"learning_rate": 9.252525252525253e-06,
"loss": 473.8814,
"step": 2290
},
{
"epoch": 0.01858450698535056,
"grad_norm": 1174.0333251953125,
"learning_rate": 9.292929292929294e-06,
"loss": 557.2924,
"step": 2300
},
{
"epoch": 0.018665309189634695,
"grad_norm": 779.054931640625,
"learning_rate": 9.333333333333334e-06,
"loss": 340.7218,
"step": 2310
},
{
"epoch": 0.018746111393918827,
"grad_norm": 1373.9344482421875,
"learning_rate": 9.373737373737375e-06,
"loss": 364.399,
"step": 2320
},
{
"epoch": 0.01882691359820296,
"grad_norm": 1212.10595703125,
"learning_rate": 9.414141414141414e-06,
"loss": 532.5891,
"step": 2330
},
{
"epoch": 0.018907715802487093,
"grad_norm": 1213.900146484375,
"learning_rate": 9.454545454545454e-06,
"loss": 452.1891,
"step": 2340
},
{
"epoch": 0.018988518006771225,
"grad_norm": 5826.9482421875,
"learning_rate": 9.494949494949495e-06,
"loss": 416.3645,
"step": 2350
},
{
"epoch": 0.019069320211055358,
"grad_norm": 6701.46630859375,
"learning_rate": 9.535353535353535e-06,
"loss": 441.6159,
"step": 2360
},
{
"epoch": 0.01915012241533949,
"grad_norm": 2676.863525390625,
"learning_rate": 9.575757575757578e-06,
"loss": 418.8925,
"step": 2370
},
{
"epoch": 0.019230924619623623,
"grad_norm": 1438.232177734375,
"learning_rate": 9.616161616161616e-06,
"loss": 484.5964,
"step": 2380
},
{
"epoch": 0.019311726823907756,
"grad_norm": 2674.849853515625,
"learning_rate": 9.656565656565657e-06,
"loss": 448.1574,
"step": 2390
},
{
"epoch": 0.01939252902819189,
"grad_norm": 1142.754638671875,
"learning_rate": 9.696969696969698e-06,
"loss": 368.5235,
"step": 2400
},
{
"epoch": 0.01947333123247602,
"grad_norm": 1908.450927734375,
"learning_rate": 9.737373737373738e-06,
"loss": 448.1265,
"step": 2410
},
{
"epoch": 0.019554133436760154,
"grad_norm": 930.4298706054688,
"learning_rate": 9.777777777777779e-06,
"loss": 412.3102,
"step": 2420
},
{
"epoch": 0.019634935641044286,
"grad_norm": 885.7802124023438,
"learning_rate": 9.818181818181818e-06,
"loss": 425.3447,
"step": 2430
},
{
"epoch": 0.01971573784532842,
"grad_norm": 1650.6605224609375,
"learning_rate": 9.858585858585858e-06,
"loss": 458.773,
"step": 2440
},
{
"epoch": 0.019796540049612555,
"grad_norm": 1456.41943359375,
"learning_rate": 9.898989898989899e-06,
"loss": 453.6257,
"step": 2450
},
{
"epoch": 0.019877342253896688,
"grad_norm": 1131.8419189453125,
"learning_rate": 9.93939393939394e-06,
"loss": 404.5182,
"step": 2460
},
{
"epoch": 0.01995814445818082,
"grad_norm": 1003.8864135742188,
"learning_rate": 9.979797979797981e-06,
"loss": 494.8275,
"step": 2470
},
{
"epoch": 0.020038946662464953,
"grad_norm": 3438.40673828125,
"learning_rate": 1.0020202020202022e-05,
"loss": 434.0848,
"step": 2480
},
{
"epoch": 0.020119748866749085,
"grad_norm": 1796.56298828125,
"learning_rate": 1.006060606060606e-05,
"loss": 568.4981,
"step": 2490
},
{
"epoch": 0.020200551071033218,
"grad_norm": 2394.705322265625,
"learning_rate": 1.0101010101010101e-05,
"loss": 468.4812,
"step": 2500
},
{
"epoch": 0.02028135327531735,
"grad_norm": 1344.266845703125,
"learning_rate": 1.0141414141414142e-05,
"loss": 344.5415,
"step": 2510
},
{
"epoch": 0.020362155479601483,
"grad_norm": 1845.0267333984375,
"learning_rate": 1.0181818181818182e-05,
"loss": 466.122,
"step": 2520
},
{
"epoch": 0.020442957683885616,
"grad_norm": 2368.758544921875,
"learning_rate": 1.0222222222222223e-05,
"loss": 449.0622,
"step": 2530
},
{
"epoch": 0.02052375988816975,
"grad_norm": 1838.78955078125,
"learning_rate": 1.0262626262626262e-05,
"loss": 408.0261,
"step": 2540
},
{
"epoch": 0.02060456209245388,
"grad_norm": 1171.175537109375,
"learning_rate": 1.0303030303030304e-05,
"loss": 438.3216,
"step": 2550
},
{
"epoch": 0.020685364296738014,
"grad_norm": 997.9658813476562,
"learning_rate": 1.0343434343434345e-05,
"loss": 396.0697,
"step": 2560
},
{
"epoch": 0.020766166501022146,
"grad_norm": 955.8753662109375,
"learning_rate": 1.0383838383838385e-05,
"loss": 352.4154,
"step": 2570
},
{
"epoch": 0.02084696870530628,
"grad_norm": 1111.3548583984375,
"learning_rate": 1.0424242424242426e-05,
"loss": 408.0275,
"step": 2580
},
{
"epoch": 0.020927770909590415,
"grad_norm": 1489.833740234375,
"learning_rate": 1.0464646464646465e-05,
"loss": 442.6512,
"step": 2590
},
{
"epoch": 0.021008573113874548,
"grad_norm": 1263.6846923828125,
"learning_rate": 1.0505050505050505e-05,
"loss": 451.8872,
"step": 2600
},
{
"epoch": 0.02108937531815868,
"grad_norm": 1563.267333984375,
"learning_rate": 1.0545454545454546e-05,
"loss": 433.7399,
"step": 2610
},
{
"epoch": 0.021170177522442813,
"grad_norm": 2102.853515625,
"learning_rate": 1.0585858585858586e-05,
"loss": 486.5411,
"step": 2620
},
{
"epoch": 0.021250979726726946,
"grad_norm": 1215.5638427734375,
"learning_rate": 1.0626262626262627e-05,
"loss": 489.4264,
"step": 2630
},
{
"epoch": 0.02133178193101108,
"grad_norm": 1669.6094970703125,
"learning_rate": 1.0666666666666667e-05,
"loss": 421.0313,
"step": 2640
},
{
"epoch": 0.02141258413529521,
"grad_norm": 1581.879150390625,
"learning_rate": 1.0707070707070708e-05,
"loss": 446.2861,
"step": 2650
},
{
"epoch": 0.021493386339579344,
"grad_norm": 3905.970703125,
"learning_rate": 1.0747474747474748e-05,
"loss": 455.1569,
"step": 2660
},
{
"epoch": 0.021574188543863476,
"grad_norm": 3564.063232421875,
"learning_rate": 1.0787878787878789e-05,
"loss": 404.7379,
"step": 2670
},
{
"epoch": 0.02165499074814761,
"grad_norm": 1134.174072265625,
"learning_rate": 1.082828282828283e-05,
"loss": 479.0893,
"step": 2680
},
{
"epoch": 0.02173579295243174,
"grad_norm": 1493.994873046875,
"learning_rate": 1.086868686868687e-05,
"loss": 487.3219,
"step": 2690
},
{
"epoch": 0.021816595156715874,
"grad_norm": 1302.6314697265625,
"learning_rate": 1.0909090909090909e-05,
"loss": 517.1011,
"step": 2700
},
{
"epoch": 0.021897397361000007,
"grad_norm": 2952.864013671875,
"learning_rate": 1.094949494949495e-05,
"loss": 462.4752,
"step": 2710
},
{
"epoch": 0.02197819956528414,
"grad_norm": 1392.331298828125,
"learning_rate": 1.098989898989899e-05,
"loss": 494.226,
"step": 2720
},
{
"epoch": 0.022059001769568275,
"grad_norm": 2029.0225830078125,
"learning_rate": 1.103030303030303e-05,
"loss": 636.2813,
"step": 2730
},
{
"epoch": 0.022139803973852408,
"grad_norm": 3201.144287109375,
"learning_rate": 1.1070707070707071e-05,
"loss": 470.682,
"step": 2740
},
{
"epoch": 0.02222060617813654,
"grad_norm": 2171.435791015625,
"learning_rate": 1.1111111111111112e-05,
"loss": 514.9135,
"step": 2750
},
{
"epoch": 0.022301408382420673,
"grad_norm": 760.407470703125,
"learning_rate": 1.1151515151515152e-05,
"loss": 396.2693,
"step": 2760
},
{
"epoch": 0.022382210586704806,
"grad_norm": 5750.24951171875,
"learning_rate": 1.1191919191919193e-05,
"loss": 394.7851,
"step": 2770
},
{
"epoch": 0.02246301279098894,
"grad_norm": 2005.41650390625,
"learning_rate": 1.1232323232323233e-05,
"loss": 399.2545,
"step": 2780
},
{
"epoch": 0.02254381499527307,
"grad_norm": 2525.82080078125,
"learning_rate": 1.1272727272727274e-05,
"loss": 303.4952,
"step": 2790
},
{
"epoch": 0.022624617199557204,
"grad_norm": 1201.04248046875,
"learning_rate": 1.1313131313131314e-05,
"loss": 421.5876,
"step": 2800
},
{
"epoch": 0.022705419403841336,
"grad_norm": 1753.299560546875,
"learning_rate": 1.1353535353535353e-05,
"loss": 401.7698,
"step": 2810
},
{
"epoch": 0.02278622160812547,
"grad_norm": 1007.8385009765625,
"learning_rate": 1.1393939393939394e-05,
"loss": 481.1457,
"step": 2820
},
{
"epoch": 0.0228670238124096,
"grad_norm": 1377.8157958984375,
"learning_rate": 1.1434343434343434e-05,
"loss": 474.0957,
"step": 2830
},
{
"epoch": 0.022947826016693734,
"grad_norm": 769.2996826171875,
"learning_rate": 1.1474747474747475e-05,
"loss": 358.7357,
"step": 2840
},
{
"epoch": 0.023028628220977867,
"grad_norm": 1722.425048828125,
"learning_rate": 1.1515151515151517e-05,
"loss": 526.5532,
"step": 2850
},
{
"epoch": 0.023109430425262,
"grad_norm": 890.7434692382812,
"learning_rate": 1.1555555555555556e-05,
"loss": 374.7328,
"step": 2860
},
{
"epoch": 0.023190232629546136,
"grad_norm": 1437.0924072265625,
"learning_rate": 1.1595959595959597e-05,
"loss": 354.1128,
"step": 2870
},
{
"epoch": 0.023271034833830268,
"grad_norm": 1823.4405517578125,
"learning_rate": 1.1636363636363637e-05,
"loss": 497.2729,
"step": 2880
},
{
"epoch": 0.0233518370381144,
"grad_norm": 1081.6297607421875,
"learning_rate": 1.1676767676767678e-05,
"loss": 381.0187,
"step": 2890
},
{
"epoch": 0.023432639242398533,
"grad_norm": 2291.82763671875,
"learning_rate": 1.1717171717171718e-05,
"loss": 439.2377,
"step": 2900
},
{
"epoch": 0.023513441446682666,
"grad_norm": 1737.9361572265625,
"learning_rate": 1.1757575757575757e-05,
"loss": 363.3017,
"step": 2910
},
{
"epoch": 0.0235942436509668,
"grad_norm": 1336.8502197265625,
"learning_rate": 1.1797979797979798e-05,
"loss": 420.6219,
"step": 2920
},
{
"epoch": 0.02367504585525093,
"grad_norm": 1407.14208984375,
"learning_rate": 1.1838383838383838e-05,
"loss": 586.4723,
"step": 2930
},
{
"epoch": 0.023755848059535064,
"grad_norm": 1774.9156494140625,
"learning_rate": 1.187878787878788e-05,
"loss": 407.5993,
"step": 2940
},
{
"epoch": 0.023836650263819197,
"grad_norm": 3886.3212890625,
"learning_rate": 1.1919191919191921e-05,
"loss": 488.9637,
"step": 2950
},
{
"epoch": 0.02391745246810333,
"grad_norm": 977.4032592773438,
"learning_rate": 1.1959595959595961e-05,
"loss": 349.8829,
"step": 2960
},
{
"epoch": 0.023998254672387462,
"grad_norm": 1776.18994140625,
"learning_rate": 1.2e-05,
"loss": 378.9573,
"step": 2970
},
{
"epoch": 0.024079056876671594,
"grad_norm": 982.6826171875,
"learning_rate": 1.2040404040404041e-05,
"loss": 385.4363,
"step": 2980
},
{
"epoch": 0.024159859080955727,
"grad_norm": 1428.6072998046875,
"learning_rate": 1.2080808080808081e-05,
"loss": 411.3545,
"step": 2990
},
{
"epoch": 0.02424066128523986,
"grad_norm": 2080.078125,
"learning_rate": 1.2121212121212122e-05,
"loss": 307.3638,
"step": 3000
},
{
"epoch": 0.024321463489523996,
"grad_norm": 2622.278076171875,
"learning_rate": 1.2161616161616162e-05,
"loss": 336.3731,
"step": 3010
},
{
"epoch": 0.02440226569380813,
"grad_norm": 1076.52587890625,
"learning_rate": 1.2202020202020201e-05,
"loss": 389.7547,
"step": 3020
},
{
"epoch": 0.02448306789809226,
"grad_norm": 2400.458740234375,
"learning_rate": 1.2242424242424242e-05,
"loss": 401.2474,
"step": 3030
},
{
"epoch": 0.024563870102376394,
"grad_norm": 2083.1240234375,
"learning_rate": 1.2282828282828284e-05,
"loss": 405.6909,
"step": 3040
},
{
"epoch": 0.024644672306660526,
"grad_norm": 3678.456787109375,
"learning_rate": 1.2323232323232325e-05,
"loss": 425.2927,
"step": 3050
},
{
"epoch": 0.02472547451094466,
"grad_norm": 2447.279296875,
"learning_rate": 1.2363636363636365e-05,
"loss": 536.6354,
"step": 3060
},
{
"epoch": 0.02480627671522879,
"grad_norm": 1382.6463623046875,
"learning_rate": 1.2404040404040404e-05,
"loss": 417.6737,
"step": 3070
},
{
"epoch": 0.024887078919512924,
"grad_norm": 878.1357421875,
"learning_rate": 1.2444444444444445e-05,
"loss": 403.2435,
"step": 3080
},
{
"epoch": 0.024967881123797057,
"grad_norm": 1655.641845703125,
"learning_rate": 1.2484848484848485e-05,
"loss": 398.7163,
"step": 3090
},
{
"epoch": 0.02504868332808119,
"grad_norm": 1916.8333740234375,
"learning_rate": 1.2525252525252526e-05,
"loss": 417.5105,
"step": 3100
},
{
"epoch": 0.025129485532365322,
"grad_norm": 3886.349853515625,
"learning_rate": 1.2565656565656566e-05,
"loss": 481.2548,
"step": 3110
},
{
"epoch": 0.025210287736649455,
"grad_norm": 1206.383056640625,
"learning_rate": 1.2606060606060607e-05,
"loss": 356.7375,
"step": 3120
},
{
"epoch": 0.025291089940933587,
"grad_norm": 1333.0904541015625,
"learning_rate": 1.2646464646464647e-05,
"loss": 418.2427,
"step": 3130
},
{
"epoch": 0.02537189214521772,
"grad_norm": 1487.70458984375,
"learning_rate": 1.2686868686868688e-05,
"loss": 496.2586,
"step": 3140
},
{
"epoch": 0.025452694349501856,
"grad_norm": 1888.8553466796875,
"learning_rate": 1.2727272727272727e-05,
"loss": 463.9542,
"step": 3150
},
{
"epoch": 0.02553349655378599,
"grad_norm": 1996.4786376953125,
"learning_rate": 1.2767676767676767e-05,
"loss": 355.7204,
"step": 3160
},
{
"epoch": 0.02561429875807012,
"grad_norm": 3336.894775390625,
"learning_rate": 1.2808080808080808e-05,
"loss": 491.6457,
"step": 3170
},
{
"epoch": 0.025695100962354254,
"grad_norm": 1442.059814453125,
"learning_rate": 1.2848484848484848e-05,
"loss": 440.1709,
"step": 3180
},
{
"epoch": 0.025775903166638386,
"grad_norm": 1862.8482666015625,
"learning_rate": 1.2888888888888889e-05,
"loss": 432.363,
"step": 3190
},
{
"epoch": 0.02585670537092252,
"grad_norm": 1185.944091796875,
"learning_rate": 1.292929292929293e-05,
"loss": 493.8533,
"step": 3200
},
{
"epoch": 0.02593750757520665,
"grad_norm": 2231.38134765625,
"learning_rate": 1.296969696969697e-05,
"loss": 399.7351,
"step": 3210
},
{
"epoch": 0.026018309779490784,
"grad_norm": 2386.878173828125,
"learning_rate": 1.301010101010101e-05,
"loss": 428.7118,
"step": 3220
},
{
"epoch": 0.026099111983774917,
"grad_norm": 1025.3896484375,
"learning_rate": 1.3050505050505051e-05,
"loss": 400.1819,
"step": 3230
},
{
"epoch": 0.02617991418805905,
"grad_norm": 1678.713134765625,
"learning_rate": 1.3090909090909093e-05,
"loss": 434.1818,
"step": 3240
},
{
"epoch": 0.026260716392343182,
"grad_norm": 1201.3856201171875,
"learning_rate": 1.3131313131313134e-05,
"loss": 395.3099,
"step": 3250
},
{
"epoch": 0.026341518596627315,
"grad_norm": 5197.4033203125,
"learning_rate": 1.3171717171717171e-05,
"loss": 396.583,
"step": 3260
},
{
"epoch": 0.026422320800911447,
"grad_norm": 1856.5380859375,
"learning_rate": 1.3212121212121212e-05,
"loss": 440.6047,
"step": 3270
},
{
"epoch": 0.02650312300519558,
"grad_norm": 1735.7449951171875,
"learning_rate": 1.3252525252525252e-05,
"loss": 355.502,
"step": 3280
},
{
"epoch": 0.026583925209479716,
"grad_norm": 1101.4957275390625,
"learning_rate": 1.3292929292929293e-05,
"loss": 320.7389,
"step": 3290
},
{
"epoch": 0.02666472741376385,
"grad_norm": 1054.079345703125,
"learning_rate": 1.3333333333333333e-05,
"loss": 356.3952,
"step": 3300
},
{
"epoch": 0.02674552961804798,
"grad_norm": 2106.282958984375,
"learning_rate": 1.3373737373737374e-05,
"loss": 403.6069,
"step": 3310
},
{
"epoch": 0.026826331822332114,
"grad_norm": 1672.3568115234375,
"learning_rate": 1.3414141414141414e-05,
"loss": 448.6192,
"step": 3320
},
{
"epoch": 0.026907134026616247,
"grad_norm": 1342.961181640625,
"learning_rate": 1.3454545454545457e-05,
"loss": 332.1857,
"step": 3330
},
{
"epoch": 0.02698793623090038,
"grad_norm": 991.6637573242188,
"learning_rate": 1.3494949494949497e-05,
"loss": 336.3228,
"step": 3340
},
{
"epoch": 0.027068738435184512,
"grad_norm": 3500.915771484375,
"learning_rate": 1.3535353535353538e-05,
"loss": 469.8078,
"step": 3350
},
{
"epoch": 0.027149540639468644,
"grad_norm": 1306.3658447265625,
"learning_rate": 1.3575757575757578e-05,
"loss": 412.32,
"step": 3360
},
{
"epoch": 0.027230342843752777,
"grad_norm": 1543.1270751953125,
"learning_rate": 1.3616161616161615e-05,
"loss": 568.8118,
"step": 3370
},
{
"epoch": 0.02731114504803691,
"grad_norm": 2236.9619140625,
"learning_rate": 1.3656565656565656e-05,
"loss": 499.9743,
"step": 3380
},
{
"epoch": 0.027391947252321042,
"grad_norm": 2010.0272216796875,
"learning_rate": 1.3696969696969697e-05,
"loss": 390.202,
"step": 3390
},
{
"epoch": 0.027472749456605175,
"grad_norm": 2621.369873046875,
"learning_rate": 1.3737373737373737e-05,
"loss": 447.2703,
"step": 3400
},
{
"epoch": 0.027553551660889308,
"grad_norm": 1979.689697265625,
"learning_rate": 1.3777777777777778e-05,
"loss": 364.1719,
"step": 3410
},
{
"epoch": 0.02763435386517344,
"grad_norm": 1638.1971435546875,
"learning_rate": 1.3818181818181818e-05,
"loss": 330.4022,
"step": 3420
},
{
"epoch": 0.027715156069457576,
"grad_norm": 1491.321533203125,
"learning_rate": 1.385858585858586e-05,
"loss": 381.2672,
"step": 3430
},
{
"epoch": 0.02779595827374171,
"grad_norm": 1468.6680908203125,
"learning_rate": 1.3898989898989901e-05,
"loss": 464.3973,
"step": 3440
},
{
"epoch": 0.02787676047802584,
"grad_norm": 4992.0,
"learning_rate": 1.3939393939393942e-05,
"loss": 414.7291,
"step": 3450
},
{
"epoch": 0.027957562682309974,
"grad_norm": 739.7650756835938,
"learning_rate": 1.3979797979797982e-05,
"loss": 369.1546,
"step": 3460
},
{
"epoch": 0.028038364886594107,
"grad_norm": 1464.85009765625,
"learning_rate": 1.402020202020202e-05,
"loss": 346.7063,
"step": 3470
},
{
"epoch": 0.02811916709087824,
"grad_norm": 1048.708984375,
"learning_rate": 1.406060606060606e-05,
"loss": 321.1962,
"step": 3480
},
{
"epoch": 0.028199969295162372,
"grad_norm": 4313.70556640625,
"learning_rate": 1.41010101010101e-05,
"loss": 353.4636,
"step": 3490
},
{
"epoch": 0.028280771499446505,
"grad_norm": 2037.3917236328125,
"learning_rate": 1.4141414141414141e-05,
"loss": 399.9984,
"step": 3500
},
{
"epoch": 0.028361573703730637,
"grad_norm": 2155.431396484375,
"learning_rate": 1.4181818181818181e-05,
"loss": 370.1783,
"step": 3510
},
{
"epoch": 0.02844237590801477,
"grad_norm": 4556.9873046875,
"learning_rate": 1.4222222222222224e-05,
"loss": 342.3656,
"step": 3520
},
{
"epoch": 0.028523178112298903,
"grad_norm": 1619.715087890625,
"learning_rate": 1.4262626262626264e-05,
"loss": 399.8575,
"step": 3530
},
{
"epoch": 0.028603980316583035,
"grad_norm": 1036.035400390625,
"learning_rate": 1.4303030303030305e-05,
"loss": 336.8583,
"step": 3540
},
{
"epoch": 0.028684782520867168,
"grad_norm": 896.1290893554688,
"learning_rate": 1.4343434343434345e-05,
"loss": 397.4199,
"step": 3550
},
{
"epoch": 0.0287655847251513,
"grad_norm": 871.9555053710938,
"learning_rate": 1.4383838383838386e-05,
"loss": 421.5666,
"step": 3560
},
{
"epoch": 0.028846386929435437,
"grad_norm": 1713.01806640625,
"learning_rate": 1.4424242424242426e-05,
"loss": 385.2063,
"step": 3570
},
{
"epoch": 0.02892718913371957,
"grad_norm": 6706.7744140625,
"learning_rate": 1.4464646464646464e-05,
"loss": 574.8361,
"step": 3580
},
{
"epoch": 0.029007991338003702,
"grad_norm": 47397.72265625,
"learning_rate": 1.4505050505050504e-05,
"loss": 529.9805,
"step": 3590
},
{
"epoch": 0.029088793542287834,
"grad_norm": 2835.7958984375,
"learning_rate": 1.4545454545454545e-05,
"loss": 392.688,
"step": 3600
},
{
"epoch": 0.029169595746571967,
"grad_norm": 1097.8177490234375,
"learning_rate": 1.4585858585858587e-05,
"loss": 359.7729,
"step": 3610
},
{
"epoch": 0.0292503979508561,
"grad_norm": 954.6292114257812,
"learning_rate": 1.4626262626262627e-05,
"loss": 392.3154,
"step": 3620
},
{
"epoch": 0.029331200155140232,
"grad_norm": 3372.08251953125,
"learning_rate": 1.4666666666666668e-05,
"loss": 400.445,
"step": 3630
},
{
"epoch": 0.029412002359424365,
"grad_norm": 3138.397705078125,
"learning_rate": 1.4707070707070709e-05,
"loss": 341.2362,
"step": 3640
},
{
"epoch": 0.029492804563708497,
"grad_norm": 1593.2418212890625,
"learning_rate": 1.4747474747474749e-05,
"loss": 371.5777,
"step": 3650
},
{
"epoch": 0.02957360676799263,
"grad_norm": 1874.13037109375,
"learning_rate": 1.478787878787879e-05,
"loss": 445.4564,
"step": 3660
},
{
"epoch": 0.029654408972276763,
"grad_norm": 1240.8599853515625,
"learning_rate": 1.482828282828283e-05,
"loss": 383.5819,
"step": 3670
},
{
"epoch": 0.029735211176560895,
"grad_norm": 6037.068359375,
"learning_rate": 1.486868686868687e-05,
"loss": 345.8714,
"step": 3680
},
{
"epoch": 0.029816013380845028,
"grad_norm": 6898.64990234375,
"learning_rate": 1.4909090909090908e-05,
"loss": 316.7964,
"step": 3690
},
{
"epoch": 0.02989681558512916,
"grad_norm": 1123.4033203125,
"learning_rate": 1.494949494949495e-05,
"loss": 469.9616,
"step": 3700
},
{
"epoch": 0.029977617789413297,
"grad_norm": 1321.8525390625,
"learning_rate": 1.498989898989899e-05,
"loss": 391.1738,
"step": 3710
},
{
"epoch": 0.03005841999369743,
"grad_norm": 2863.68212890625,
"learning_rate": 1.5030303030303031e-05,
"loss": 295.6113,
"step": 3720
},
{
"epoch": 0.030139222197981562,
"grad_norm": 1591.6097412109375,
"learning_rate": 1.5070707070707072e-05,
"loss": 446.7923,
"step": 3730
},
{
"epoch": 0.030220024402265695,
"grad_norm": 2339.865234375,
"learning_rate": 1.5111111111111112e-05,
"loss": 401.7253,
"step": 3740
},
{
"epoch": 0.030300826606549827,
"grad_norm": 1764.2125244140625,
"learning_rate": 1.5151515151515153e-05,
"loss": 439.8055,
"step": 3750
},
{
"epoch": 0.03038162881083396,
"grad_norm": 1147.5810546875,
"learning_rate": 1.5191919191919193e-05,
"loss": 404.343,
"step": 3760
},
{
"epoch": 0.030462431015118092,
"grad_norm": 1645.48095703125,
"learning_rate": 1.5232323232323234e-05,
"loss": 393.5273,
"step": 3770
},
{
"epoch": 0.030543233219402225,
"grad_norm": 1111.396484375,
"learning_rate": 1.5272727272727276e-05,
"loss": 339.2941,
"step": 3780
},
{
"epoch": 0.030624035423686358,
"grad_norm": 1468.22216796875,
"learning_rate": 1.531313131313131e-05,
"loss": 428.423,
"step": 3790
},
{
"epoch": 0.03070483762797049,
"grad_norm": 6860.38134765625,
"learning_rate": 1.5353535353535354e-05,
"loss": 480.3668,
"step": 3800
},
{
"epoch": 0.030785639832254623,
"grad_norm": 1157.9761962890625,
"learning_rate": 1.5393939393939393e-05,
"loss": 397.3216,
"step": 3810
},
{
"epoch": 0.030866442036538756,
"grad_norm": 751.1998291015625,
"learning_rate": 1.5434343434343435e-05,
"loss": 314.7172,
"step": 3820
},
{
"epoch": 0.030947244240822888,
"grad_norm": 862.4395141601562,
"learning_rate": 1.5474747474747474e-05,
"loss": 372.4114,
"step": 3830
},
{
"epoch": 0.031028046445107024,
"grad_norm": 1985.647216796875,
"learning_rate": 1.5515151515151516e-05,
"loss": 410.9164,
"step": 3840
},
{
"epoch": 0.031108848649391157,
"grad_norm": 1223.5931396484375,
"learning_rate": 1.5555555555555555e-05,
"loss": 409.5396,
"step": 3850
},
{
"epoch": 0.03118965085367529,
"grad_norm": 1147.3355712890625,
"learning_rate": 1.5595959595959597e-05,
"loss": 343.1769,
"step": 3860
},
{
"epoch": 0.03127045305795942,
"grad_norm": 931.1632080078125,
"learning_rate": 1.563636363636364e-05,
"loss": 287.52,
"step": 3870
},
{
"epoch": 0.031351255262243555,
"grad_norm": 2921.90478515625,
"learning_rate": 1.5676767676767678e-05,
"loss": 466.8278,
"step": 3880
},
{
"epoch": 0.031432057466527684,
"grad_norm": 1351.41064453125,
"learning_rate": 1.571717171717172e-05,
"loss": 367.0911,
"step": 3890
},
{
"epoch": 0.03151285967081182,
"grad_norm": 2405.606689453125,
"learning_rate": 1.5757575757575756e-05,
"loss": 368.1811,
"step": 3900
},
{
"epoch": 0.031593661875095956,
"grad_norm": 4332.23095703125,
"learning_rate": 1.5797979797979798e-05,
"loss": 346.5809,
"step": 3910
},
{
"epoch": 0.031674464079380085,
"grad_norm": 2268.54736328125,
"learning_rate": 1.5838383838383837e-05,
"loss": 332.8164,
"step": 3920
},
{
"epoch": 0.03175526628366422,
"grad_norm": 1450.4573974609375,
"learning_rate": 1.587878787878788e-05,
"loss": 360.3381,
"step": 3930
},
{
"epoch": 0.03183606848794835,
"grad_norm": 7813.84912109375,
"learning_rate": 1.5919191919191918e-05,
"loss": 463.5723,
"step": 3940
},
{
"epoch": 0.03191687069223249,
"grad_norm": 1389.827880859375,
"learning_rate": 1.595959595959596e-05,
"loss": 272.0756,
"step": 3950
},
{
"epoch": 0.031997672896516616,
"grad_norm": 1054.016357421875,
"learning_rate": 1.6000000000000003e-05,
"loss": 410.4074,
"step": 3960
},
{
"epoch": 0.03207847510080075,
"grad_norm": 2336.52294921875,
"learning_rate": 1.604040404040404e-05,
"loss": 374.9348,
"step": 3970
},
{
"epoch": 0.03215927730508488,
"grad_norm": 1440.7698974609375,
"learning_rate": 1.6080808080808084e-05,
"loss": 433.5792,
"step": 3980
},
{
"epoch": 0.03224007950936902,
"grad_norm": 1275.298095703125,
"learning_rate": 1.6121212121212123e-05,
"loss": 407.9875,
"step": 3990
},
{
"epoch": 0.032320881713653146,
"grad_norm": 2616.390869140625,
"learning_rate": 1.6161616161616165e-05,
"loss": 345.8577,
"step": 4000
},
{
"epoch": 0.03240168391793728,
"grad_norm": 4405.9677734375,
"learning_rate": 1.62020202020202e-05,
"loss": 563.3746,
"step": 4010
},
{
"epoch": 0.03248248612222141,
"grad_norm": 1293.5716552734375,
"learning_rate": 1.6242424242424243e-05,
"loss": 438.8694,
"step": 4020
},
{
"epoch": 0.03256328832650555,
"grad_norm": 1728.333251953125,
"learning_rate": 1.628282828282828e-05,
"loss": 436.4476,
"step": 4030
},
{
"epoch": 0.03264409053078968,
"grad_norm": 1484.926025390625,
"learning_rate": 1.6323232323232324e-05,
"loss": 338.0915,
"step": 4040
},
{
"epoch": 0.03272489273507381,
"grad_norm": 1212.051025390625,
"learning_rate": 1.6363636363636366e-05,
"loss": 345.7399,
"step": 4050
},
{
"epoch": 0.03280569493935795,
"grad_norm": 2545.336669921875,
"learning_rate": 1.6404040404040405e-05,
"loss": 383.8622,
"step": 4060
},
{
"epoch": 0.03288649714364208,
"grad_norm": 1114.4097900390625,
"learning_rate": 1.6444444444444447e-05,
"loss": 329.3188,
"step": 4070
},
{
"epoch": 0.032967299347926214,
"grad_norm": 1056.169677734375,
"learning_rate": 1.6484848484848486e-05,
"loss": 311.8675,
"step": 4080
},
{
"epoch": 0.03304810155221034,
"grad_norm": 1482.0853271484375,
"learning_rate": 1.6525252525252528e-05,
"loss": 299.9676,
"step": 4090
},
{
"epoch": 0.03312890375649448,
"grad_norm": 7475.61279296875,
"learning_rate": 1.6565656565656567e-05,
"loss": 404.3645,
"step": 4100
},
{
"epoch": 0.03320970596077861,
"grad_norm": 1250.048583984375,
"learning_rate": 1.6606060606060606e-05,
"loss": 353.2959,
"step": 4110
},
{
"epoch": 0.033290508165062745,
"grad_norm": 6574.8798828125,
"learning_rate": 1.6646464646464645e-05,
"loss": 332.5623,
"step": 4120
},
{
"epoch": 0.033371310369346874,
"grad_norm": 884.6202392578125,
"learning_rate": 1.6686868686868687e-05,
"loss": 393.6061,
"step": 4130
},
{
"epoch": 0.03345211257363101,
"grad_norm": 1503.19384765625,
"learning_rate": 1.672727272727273e-05,
"loss": 471.0581,
"step": 4140
},
{
"epoch": 0.03353291477791514,
"grad_norm": 2390.65380859375,
"learning_rate": 1.6767676767676768e-05,
"loss": 379.6377,
"step": 4150
},
{
"epoch": 0.033613716982199275,
"grad_norm": 2654.139892578125,
"learning_rate": 1.680808080808081e-05,
"loss": 361.57,
"step": 4160
},
{
"epoch": 0.033694519186483404,
"grad_norm": 1114.0318603515625,
"learning_rate": 1.684848484848485e-05,
"loss": 315.6078,
"step": 4170
},
{
"epoch": 0.03377532139076754,
"grad_norm": 2269.401611328125,
"learning_rate": 1.688888888888889e-05,
"loss": 387.0298,
"step": 4180
},
{
"epoch": 0.033856123595051676,
"grad_norm": 2904.967529296875,
"learning_rate": 1.692929292929293e-05,
"loss": 286.9893,
"step": 4190
},
{
"epoch": 0.033936925799335806,
"grad_norm": 1229.243408203125,
"learning_rate": 1.6969696969696972e-05,
"loss": 371.6422,
"step": 4200
},
{
"epoch": 0.03401772800361994,
"grad_norm": 1940.672607421875,
"learning_rate": 1.701010101010101e-05,
"loss": 358.8707,
"step": 4210
},
{
"epoch": 0.03409853020790407,
"grad_norm": 2709.007568359375,
"learning_rate": 1.705050505050505e-05,
"loss": 333.9397,
"step": 4220
},
{
"epoch": 0.03417933241218821,
"grad_norm": 1473.49609375,
"learning_rate": 1.7090909090909092e-05,
"loss": 276.9469,
"step": 4230
},
{
"epoch": 0.034260134616472336,
"grad_norm": 2211.987060546875,
"learning_rate": 1.713131313131313e-05,
"loss": 343.1185,
"step": 4240
},
{
"epoch": 0.03434093682075647,
"grad_norm": 918.4265747070312,
"learning_rate": 1.7171717171717173e-05,
"loss": 441.9232,
"step": 4250
},
{
"epoch": 0.0344217390250406,
"grad_norm": 1585.6217041015625,
"learning_rate": 1.7212121212121212e-05,
"loss": 401.2611,
"step": 4260
},
{
"epoch": 0.03450254122932474,
"grad_norm": 2644.159912109375,
"learning_rate": 1.7252525252525255e-05,
"loss": 319.812,
"step": 4270
},
{
"epoch": 0.03458334343360887,
"grad_norm": 1674.13623046875,
"learning_rate": 1.7292929292929293e-05,
"loss": 410.6561,
"step": 4280
},
{
"epoch": 0.034664145637893,
"grad_norm": 7976.99609375,
"learning_rate": 1.7333333333333336e-05,
"loss": 384.7048,
"step": 4290
},
{
"epoch": 0.03474494784217713,
"grad_norm": 1133.471435546875,
"learning_rate": 1.7373737373737375e-05,
"loss": 518.7885,
"step": 4300
},
{
"epoch": 0.03482575004646127,
"grad_norm": 1117.8463134765625,
"learning_rate": 1.7414141414141417e-05,
"loss": 373.2396,
"step": 4310
},
{
"epoch": 0.0349065522507454,
"grad_norm": 2111.07373046875,
"learning_rate": 1.7454545454545456e-05,
"loss": 380.1865,
"step": 4320
},
{
"epoch": 0.03498735445502953,
"grad_norm": 3885.1787109375,
"learning_rate": 1.7494949494949494e-05,
"loss": 334.7283,
"step": 4330
},
{
"epoch": 0.03506815665931367,
"grad_norm": 28903.310546875,
"learning_rate": 1.7535353535353537e-05,
"loss": 333.2706,
"step": 4340
},
{
"epoch": 0.0351489588635978,
"grad_norm": 2059.6552734375,
"learning_rate": 1.7575757575757576e-05,
"loss": 358.8692,
"step": 4350
},
{
"epoch": 0.035229761067881935,
"grad_norm": 1722.4095458984375,
"learning_rate": 1.7616161616161618e-05,
"loss": 488.4147,
"step": 4360
},
{
"epoch": 0.035310563272166064,
"grad_norm": 1890.9517822265625,
"learning_rate": 1.7656565656565657e-05,
"loss": 432.1608,
"step": 4370
},
{
"epoch": 0.0353913654764502,
"grad_norm": 1202.87646484375,
"learning_rate": 1.76969696969697e-05,
"loss": 358.1542,
"step": 4380
},
{
"epoch": 0.03547216768073433,
"grad_norm": 2918.1181640625,
"learning_rate": 1.7737373737373738e-05,
"loss": 326.9036,
"step": 4390
},
{
"epoch": 0.035552969885018465,
"grad_norm": 901.55419921875,
"learning_rate": 1.777777777777778e-05,
"loss": 337.8409,
"step": 4400
},
{
"epoch": 0.035633772089302594,
"grad_norm": 1185.158447265625,
"learning_rate": 1.781818181818182e-05,
"loss": 359.4807,
"step": 4410
},
{
"epoch": 0.03571457429358673,
"grad_norm": 1455.7763671875,
"learning_rate": 1.785858585858586e-05,
"loss": 352.2927,
"step": 4420
},
{
"epoch": 0.03579537649787086,
"grad_norm": 2693.363037109375,
"learning_rate": 1.78989898989899e-05,
"loss": 370.5259,
"step": 4430
},
{
"epoch": 0.035876178702154995,
"grad_norm": 2017.5230712890625,
"learning_rate": 1.793939393939394e-05,
"loss": 323.6158,
"step": 4440
},
{
"epoch": 0.035956980906439125,
"grad_norm": 2855.155029296875,
"learning_rate": 1.797979797979798e-05,
"loss": 356.3111,
"step": 4450
},
{
"epoch": 0.03603778311072326,
"grad_norm": 2315.664306640625,
"learning_rate": 1.802020202020202e-05,
"loss": 319.9107,
"step": 4460
},
{
"epoch": 0.0361185853150074,
"grad_norm": 1550.8470458984375,
"learning_rate": 1.8060606060606062e-05,
"loss": 314.4013,
"step": 4470
},
{
"epoch": 0.036199387519291526,
"grad_norm": 2441.33984375,
"learning_rate": 1.81010101010101e-05,
"loss": 392.892,
"step": 4480
},
{
"epoch": 0.03628018972357566,
"grad_norm": 4503.03662109375,
"learning_rate": 1.8141414141414143e-05,
"loss": 314.955,
"step": 4490
},
{
"epoch": 0.03636099192785979,
"grad_norm": 1784.7449951171875,
"learning_rate": 1.8181818181818182e-05,
"loss": 350.5214,
"step": 4500
},
{
"epoch": 0.03644179413214393,
"grad_norm": 2955.252685546875,
"learning_rate": 1.8222222222222224e-05,
"loss": 340.7913,
"step": 4510
},
{
"epoch": 0.036522596336428056,
"grad_norm": 2789.01513671875,
"learning_rate": 1.8262626262626263e-05,
"loss": 317.8641,
"step": 4520
},
{
"epoch": 0.03660339854071219,
"grad_norm": 2411.71728515625,
"learning_rate": 1.8303030303030305e-05,
"loss": 407.7839,
"step": 4530
},
{
"epoch": 0.03668420074499632,
"grad_norm": 2479.978759765625,
"learning_rate": 1.8343434343434344e-05,
"loss": 373.7947,
"step": 4540
},
{
"epoch": 0.03676500294928046,
"grad_norm": 4516.0986328125,
"learning_rate": 1.8383838383838383e-05,
"loss": 353.7791,
"step": 4550
},
{
"epoch": 0.03684580515356459,
"grad_norm": 2898.03955078125,
"learning_rate": 1.8424242424242425e-05,
"loss": 272.4948,
"step": 4560
},
{
"epoch": 0.03692660735784872,
"grad_norm": 1040.892822265625,
"learning_rate": 1.8464646464646464e-05,
"loss": 271.458,
"step": 4570
},
{
"epoch": 0.03700740956213285,
"grad_norm": 4451.0947265625,
"learning_rate": 1.8505050505050506e-05,
"loss": 402.6701,
"step": 4580
},
{
"epoch": 0.03708821176641699,
"grad_norm": 3623.4384765625,
"learning_rate": 1.8545454545454545e-05,
"loss": 311.2422,
"step": 4590
},
{
"epoch": 0.03716901397070112,
"grad_norm": 2795.01806640625,
"learning_rate": 1.8585858585858588e-05,
"loss": 337.2056,
"step": 4600
},
{
"epoch": 0.037249816174985254,
"grad_norm": 1317.880859375,
"learning_rate": 1.8626262626262626e-05,
"loss": 367.2568,
"step": 4610
},
{
"epoch": 0.03733061837926939,
"grad_norm": 3477.01953125,
"learning_rate": 1.866666666666667e-05,
"loss": 364.4255,
"step": 4620
},
{
"epoch": 0.03741142058355352,
"grad_norm": 5694.70068359375,
"learning_rate": 1.8707070707070707e-05,
"loss": 356.1188,
"step": 4630
},
{
"epoch": 0.037492222787837655,
"grad_norm": 1367.3173828125,
"learning_rate": 1.874747474747475e-05,
"loss": 370.9894,
"step": 4640
},
{
"epoch": 0.037573024992121784,
"grad_norm": 2475.6630859375,
"learning_rate": 1.878787878787879e-05,
"loss": 362.2273,
"step": 4650
},
{
"epoch": 0.03765382719640592,
"grad_norm": 2650.109619140625,
"learning_rate": 1.8828282828282827e-05,
"loss": 374.3637,
"step": 4660
},
{
"epoch": 0.03773462940069005,
"grad_norm": 1204.2740478515625,
"learning_rate": 1.886868686868687e-05,
"loss": 392.7774,
"step": 4670
},
{
"epoch": 0.037815431604974185,
"grad_norm": 1503.53076171875,
"learning_rate": 1.890909090909091e-05,
"loss": 324.107,
"step": 4680
},
{
"epoch": 0.037896233809258315,
"grad_norm": 1724.01953125,
"learning_rate": 1.894949494949495e-05,
"loss": 327.8625,
"step": 4690
},
{
"epoch": 0.03797703601354245,
"grad_norm": 879.3681640625,
"learning_rate": 1.898989898989899e-05,
"loss": 294.3035,
"step": 4700
},
{
"epoch": 0.03805783821782658,
"grad_norm": 1925.4000244140625,
"learning_rate": 1.9030303030303032e-05,
"loss": 322.031,
"step": 4710
},
{
"epoch": 0.038138640422110716,
"grad_norm": 1489.126708984375,
"learning_rate": 1.907070707070707e-05,
"loss": 304.9248,
"step": 4720
},
{
"epoch": 0.038219442626394845,
"grad_norm": 1604.568603515625,
"learning_rate": 1.9111111111111113e-05,
"loss": 373.274,
"step": 4730
},
{
"epoch": 0.03830024483067898,
"grad_norm": 4657.24755859375,
"learning_rate": 1.9151515151515155e-05,
"loss": 390.6698,
"step": 4740
},
{
"epoch": 0.03838104703496312,
"grad_norm": 2019.6849365234375,
"learning_rate": 1.919191919191919e-05,
"loss": 361.781,
"step": 4750
},
{
"epoch": 0.038461849239247246,
"grad_norm": 3976.860107421875,
"learning_rate": 1.9232323232323233e-05,
"loss": 309.9517,
"step": 4760
},
{
"epoch": 0.03854265144353138,
"grad_norm": 996.7420043945312,
"learning_rate": 1.9272727272727272e-05,
"loss": 273.8845,
"step": 4770
},
{
"epoch": 0.03862345364781551,
"grad_norm": 5474.6142578125,
"learning_rate": 1.9313131313131314e-05,
"loss": 345.1392,
"step": 4780
},
{
"epoch": 0.03870425585209965,
"grad_norm": 1052.18115234375,
"learning_rate": 1.9353535353535353e-05,
"loss": 308.6369,
"step": 4790
},
{
"epoch": 0.03878505805638378,
"grad_norm": 2130.4462890625,
"learning_rate": 1.9393939393939395e-05,
"loss": 298.5647,
"step": 4800
},
{
"epoch": 0.03886586026066791,
"grad_norm": 4150.1962890625,
"learning_rate": 1.9434343434343434e-05,
"loss": 285.5174,
"step": 4810
},
{
"epoch": 0.03894666246495204,
"grad_norm": 1532.9091796875,
"learning_rate": 1.9474747474747476e-05,
"loss": 382.989,
"step": 4820
},
{
"epoch": 0.03902746466923618,
"grad_norm": 2456.46875,
"learning_rate": 1.951515151515152e-05,
"loss": 373.614,
"step": 4830
},
{
"epoch": 0.03910826687352031,
"grad_norm": 16273.7587890625,
"learning_rate": 1.9555555555555557e-05,
"loss": 271.4283,
"step": 4840
},
{
"epoch": 0.03918906907780444,
"grad_norm": 1657.758544921875,
"learning_rate": 1.95959595959596e-05,
"loss": 341.233,
"step": 4850
},
{
"epoch": 0.03926987128208857,
"grad_norm": 1495.0029296875,
"learning_rate": 1.9636363636363635e-05,
"loss": 396.9126,
"step": 4860
},
{
"epoch": 0.03935067348637271,
"grad_norm": 911.6842651367188,
"learning_rate": 1.9676767676767677e-05,
"loss": 259.0603,
"step": 4870
},
{
"epoch": 0.03943147569065684,
"grad_norm": 3205.702392578125,
"learning_rate": 1.9717171717171716e-05,
"loss": 345.96,
"step": 4880
},
{
"epoch": 0.039512277894940974,
"grad_norm": 1126.9405517578125,
"learning_rate": 1.975757575757576e-05,
"loss": 277.1656,
"step": 4890
},
{
"epoch": 0.03959308009922511,
"grad_norm": 1073.0098876953125,
"learning_rate": 1.9797979797979797e-05,
"loss": 314.1923,
"step": 4900
},
{
"epoch": 0.03967388230350924,
"grad_norm": 2321.18408203125,
"learning_rate": 1.983838383838384e-05,
"loss": 352.1233,
"step": 4910
},
{
"epoch": 0.039754684507793375,
"grad_norm": 3440.341064453125,
"learning_rate": 1.987878787878788e-05,
"loss": 387.7612,
"step": 4920
},
{
"epoch": 0.039835486712077504,
"grad_norm": 2081.38134765625,
"learning_rate": 1.991919191919192e-05,
"loss": 331.0657,
"step": 4930
},
{
"epoch": 0.03991628891636164,
"grad_norm": 1204.45703125,
"learning_rate": 1.9959595959595963e-05,
"loss": 310.2021,
"step": 4940
},
{
"epoch": 0.03999709112064577,
"grad_norm": 1972.9552001953125,
"learning_rate": 2e-05,
"loss": 284.6502,
"step": 4950
},
{
"epoch": 0.040077893324929906,
"grad_norm": 2157.6552734375,
"learning_rate": 2.0040404040404044e-05,
"loss": 374.1642,
"step": 4960
},
{
"epoch": 0.040158695529214035,
"grad_norm": 1071.7281494140625,
"learning_rate": 2.008080808080808e-05,
"loss": 315.1676,
"step": 4970
},
{
"epoch": 0.04023949773349817,
"grad_norm": 2088.307861328125,
"learning_rate": 2.012121212121212e-05,
"loss": 298.2418,
"step": 4980
},
{
"epoch": 0.0403202999377823,
"grad_norm": 1532.578857421875,
"learning_rate": 2.016161616161616e-05,
"loss": 343.915,
"step": 4990
},
{
"epoch": 0.040401102142066436,
"grad_norm": 1117.131591796875,
"learning_rate": 2.0202020202020203e-05,
"loss": 266.2881,
"step": 5000
}
],
"logging_steps": 10,
"max_steps": 123750,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}