{
"best_metric": 0.9803726074405555,
"best_model_checkpoint": "FFPP-Raw_1FPS_faces-expand-40-aligned_metric-acc-precision-recall-f1\\checkpoint-4044",
"epoch": 3.0,
"eval_steps": 500,
"global_step": 4044,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 7.875372409820557,
"learning_rate": 1.234567901234568e-06,
"loss": 0.7221,
"step": 10
},
{
"epoch": 0.01,
"grad_norm": 5.61189079284668,
"learning_rate": 2.469135802469136e-06,
"loss": 0.6591,
"step": 20
},
{
"epoch": 0.02,
"grad_norm": 3.7870476245880127,
"learning_rate": 3.7037037037037037e-06,
"loss": 0.5729,
"step": 30
},
{
"epoch": 0.03,
"grad_norm": 3.753174066543579,
"learning_rate": 4.938271604938272e-06,
"loss": 0.5296,
"step": 40
},
{
"epoch": 0.04,
"grad_norm": 4.16574764251709,
"learning_rate": 6.172839506172839e-06,
"loss": 0.5178,
"step": 50
},
{
"epoch": 0.04,
"grad_norm": 2.851600408554077,
"learning_rate": 7.4074074074074075e-06,
"loss": 0.4847,
"step": 60
},
{
"epoch": 0.05,
"grad_norm": 3.5203375816345215,
"learning_rate": 8.641975308641975e-06,
"loss": 0.5449,
"step": 70
},
{
"epoch": 0.06,
"grad_norm": 5.667965888977051,
"learning_rate": 9.876543209876543e-06,
"loss": 0.5109,
"step": 80
},
{
"epoch": 0.07,
"grad_norm": 2.8893330097198486,
"learning_rate": 1.1111111111111112e-05,
"loss": 0.5429,
"step": 90
},
{
"epoch": 0.07,
"grad_norm": 3.7780683040618896,
"learning_rate": 1.2345679012345678e-05,
"loss": 0.5125,
"step": 100
},
{
"epoch": 0.08,
"grad_norm": 2.831078052520752,
"learning_rate": 1.3580246913580247e-05,
"loss": 0.5198,
"step": 110
},
{
"epoch": 0.09,
"grad_norm": 5.837050437927246,
"learning_rate": 1.4814814814814815e-05,
"loss": 0.5488,
"step": 120
},
{
"epoch": 0.1,
"grad_norm": 4.813414573669434,
"learning_rate": 1.604938271604938e-05,
"loss": 0.5093,
"step": 130
},
{
"epoch": 0.1,
"grad_norm": 7.337403774261475,
"learning_rate": 1.728395061728395e-05,
"loss": 0.5156,
"step": 140
},
{
"epoch": 0.11,
"grad_norm": 3.1361520290374756,
"learning_rate": 1.8518518518518518e-05,
"loss": 0.4917,
"step": 150
},
{
"epoch": 0.12,
"grad_norm": 3.362513303756714,
"learning_rate": 1.9753086419753087e-05,
"loss": 0.5069,
"step": 160
},
{
"epoch": 0.13,
"grad_norm": 5.006387233734131,
"learning_rate": 2.0987654320987655e-05,
"loss": 0.5263,
"step": 170
},
{
"epoch": 0.13,
"grad_norm": 5.531656742095947,
"learning_rate": 2.2222222222222223e-05,
"loss": 0.4929,
"step": 180
},
{
"epoch": 0.14,
"grad_norm": 7.966550350189209,
"learning_rate": 2.345679012345679e-05,
"loss": 0.496,
"step": 190
},
{
"epoch": 0.15,
"grad_norm": 9.282695770263672,
"learning_rate": 2.4691358024691357e-05,
"loss": 0.4463,
"step": 200
},
{
"epoch": 0.16,
"grad_norm": 5.86882209777832,
"learning_rate": 2.5925925925925925e-05,
"loss": 0.45,
"step": 210
},
{
"epoch": 0.16,
"grad_norm": 8.300739288330078,
"learning_rate": 2.7160493827160493e-05,
"loss": 0.4808,
"step": 220
},
{
"epoch": 0.17,
"grad_norm": 11.298906326293945,
"learning_rate": 2.839506172839506e-05,
"loss": 0.4344,
"step": 230
},
{
"epoch": 0.18,
"grad_norm": 6.296756744384766,
"learning_rate": 2.962962962962963e-05,
"loss": 0.4439,
"step": 240
},
{
"epoch": 0.19,
"grad_norm": 18.012989044189453,
"learning_rate": 3.08641975308642e-05,
"loss": 0.3889,
"step": 250
},
{
"epoch": 0.19,
"grad_norm": 7.0758867263793945,
"learning_rate": 3.209876543209876e-05,
"loss": 0.3825,
"step": 260
},
{
"epoch": 0.2,
"grad_norm": 17.492835998535156,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.389,
"step": 270
},
{
"epoch": 0.21,
"grad_norm": 9.036677360534668,
"learning_rate": 3.45679012345679e-05,
"loss": 0.358,
"step": 280
},
{
"epoch": 0.22,
"grad_norm": 11.230429649353027,
"learning_rate": 3.580246913580247e-05,
"loss": 0.4006,
"step": 290
},
{
"epoch": 0.22,
"grad_norm": 10.332891464233398,
"learning_rate": 3.7037037037037037e-05,
"loss": 0.394,
"step": 300
},
{
"epoch": 0.23,
"grad_norm": 12.314022064208984,
"learning_rate": 3.82716049382716e-05,
"loss": 0.3386,
"step": 310
},
{
"epoch": 0.24,
"grad_norm": 19.994348526000977,
"learning_rate": 3.950617283950617e-05,
"loss": 0.3489,
"step": 320
},
{
"epoch": 0.24,
"grad_norm": 11.452990531921387,
"learning_rate": 4.074074074074074e-05,
"loss": 0.3448,
"step": 330
},
{
"epoch": 0.25,
"grad_norm": 22.366451263427734,
"learning_rate": 4.197530864197531e-05,
"loss": 0.3499,
"step": 340
},
{
"epoch": 0.26,
"grad_norm": 7.709282398223877,
"learning_rate": 4.3209876543209875e-05,
"loss": 0.4138,
"step": 350
},
{
"epoch": 0.27,
"grad_norm": 26.1550350189209,
"learning_rate": 4.4444444444444447e-05,
"loss": 0.3593,
"step": 360
},
{
"epoch": 0.27,
"grad_norm": 5.04647159576416,
"learning_rate": 4.567901234567901e-05,
"loss": 0.4406,
"step": 370
},
{
"epoch": 0.28,
"grad_norm": 6.908942699432373,
"learning_rate": 4.691358024691358e-05,
"loss": 0.3467,
"step": 380
},
{
"epoch": 0.29,
"grad_norm": 6.973005294799805,
"learning_rate": 4.814814814814815e-05,
"loss": 0.3344,
"step": 390
},
{
"epoch": 0.3,
"grad_norm": 14.637669563293457,
"learning_rate": 4.938271604938271e-05,
"loss": 0.3448,
"step": 400
},
{
"epoch": 0.3,
"grad_norm": 12.542431831359863,
"learning_rate": 4.993129980763946e-05,
"loss": 0.3151,
"step": 410
},
{
"epoch": 0.31,
"grad_norm": 11.723505973815918,
"learning_rate": 4.979389942291839e-05,
"loss": 0.3707,
"step": 420
},
{
"epoch": 0.32,
"grad_norm": 16.42662239074707,
"learning_rate": 4.965649903819731e-05,
"loss": 0.3064,
"step": 430
},
{
"epoch": 0.33,
"grad_norm": 12.97465991973877,
"learning_rate": 4.951909865347623e-05,
"loss": 0.3307,
"step": 440
},
{
"epoch": 0.33,
"grad_norm": 7.82028865814209,
"learning_rate": 4.9381698268755155e-05,
"loss": 0.3078,
"step": 450
},
{
"epoch": 0.34,
"grad_norm": 8.340543746948242,
"learning_rate": 4.924429788403408e-05,
"loss": 0.3186,
"step": 460
},
{
"epoch": 0.35,
"grad_norm": 7.294260501861572,
"learning_rate": 4.9106897499313e-05,
"loss": 0.3153,
"step": 470
},
{
"epoch": 0.36,
"grad_norm": 22.920377731323242,
"learning_rate": 4.896949711459192e-05,
"loss": 0.3264,
"step": 480
},
{
"epoch": 0.36,
"grad_norm": 11.397745132446289,
"learning_rate": 4.883209672987085e-05,
"loss": 0.3139,
"step": 490
},
{
"epoch": 0.37,
"grad_norm": 7.087658882141113,
"learning_rate": 4.8694696345149774e-05,
"loss": 0.2879,
"step": 500
},
{
"epoch": 0.38,
"grad_norm": 9.579562187194824,
"learning_rate": 4.8557295960428687e-05,
"loss": 0.3256,
"step": 510
},
{
"epoch": 0.39,
"grad_norm": 19.549768447875977,
"learning_rate": 4.841989557570761e-05,
"loss": 0.3891,
"step": 520
},
{
"epoch": 0.39,
"grad_norm": 15.651568412780762,
"learning_rate": 4.828249519098654e-05,
"loss": 0.3692,
"step": 530
},
{
"epoch": 0.4,
"grad_norm": 12.224865913391113,
"learning_rate": 4.814509480626546e-05,
"loss": 0.2973,
"step": 540
},
{
"epoch": 0.41,
"grad_norm": 10.717370986938477,
"learning_rate": 4.800769442154438e-05,
"loss": 0.2485,
"step": 550
},
{
"epoch": 0.42,
"grad_norm": 19.767841339111328,
"learning_rate": 4.7870294036823306e-05,
"loss": 0.2418,
"step": 560
},
{
"epoch": 0.42,
"grad_norm": 12.485086441040039,
"learning_rate": 4.7732893652102225e-05,
"loss": 0.2939,
"step": 570
},
{
"epoch": 0.43,
"grad_norm": 7.367982864379883,
"learning_rate": 4.759549326738115e-05,
"loss": 0.2863,
"step": 580
},
{
"epoch": 0.44,
"grad_norm": 7.0615925788879395,
"learning_rate": 4.745809288266007e-05,
"loss": 0.2708,
"step": 590
},
{
"epoch": 0.45,
"grad_norm": 12.892487525939941,
"learning_rate": 4.7320692497939e-05,
"loss": 0.2565,
"step": 600
},
{
"epoch": 0.45,
"grad_norm": 24.594287872314453,
"learning_rate": 4.718329211321792e-05,
"loss": 0.3655,
"step": 610
},
{
"epoch": 0.46,
"grad_norm": 5.207724571228027,
"learning_rate": 4.7045891728496844e-05,
"loss": 0.3273,
"step": 620
},
{
"epoch": 0.47,
"grad_norm": 13.92258071899414,
"learning_rate": 4.6908491343775764e-05,
"loss": 0.2892,
"step": 630
},
{
"epoch": 0.47,
"grad_norm": 9.86690902709961,
"learning_rate": 4.6771090959054684e-05,
"loss": 0.2541,
"step": 640
},
{
"epoch": 0.48,
"grad_norm": 11.439994812011719,
"learning_rate": 4.663369057433361e-05,
"loss": 0.2984,
"step": 650
},
{
"epoch": 0.49,
"grad_norm": 7.470710277557373,
"learning_rate": 4.649629018961254e-05,
"loss": 0.263,
"step": 660
},
{
"epoch": 0.5,
"grad_norm": 7.861254692077637,
"learning_rate": 4.6358889804891456e-05,
"loss": 0.2686,
"step": 670
},
{
"epoch": 0.5,
"grad_norm": 21.025678634643555,
"learning_rate": 4.6221489420170376e-05,
"loss": 0.3311,
"step": 680
},
{
"epoch": 0.51,
"grad_norm": 11.080821990966797,
"learning_rate": 4.60840890354493e-05,
"loss": 0.3272,
"step": 690
},
{
"epoch": 0.52,
"grad_norm": 14.8310546875,
"learning_rate": 4.594668865072822e-05,
"loss": 0.292,
"step": 700
},
{
"epoch": 0.53,
"grad_norm": 11.789827346801758,
"learning_rate": 4.580928826600714e-05,
"loss": 0.3264,
"step": 710
},
{
"epoch": 0.53,
"grad_norm": 10.310857772827148,
"learning_rate": 4.567188788128607e-05,
"loss": 0.2687,
"step": 720
},
{
"epoch": 0.54,
"grad_norm": 11.421713829040527,
"learning_rate": 4.5534487496564995e-05,
"loss": 0.2395,
"step": 730
},
{
"epoch": 0.55,
"grad_norm": 10.276237487792969,
"learning_rate": 4.5397087111843915e-05,
"loss": 0.2133,
"step": 740
},
{
"epoch": 0.56,
"grad_norm": 7.676044940948486,
"learning_rate": 4.525968672712284e-05,
"loss": 0.2396,
"step": 750
},
{
"epoch": 0.56,
"grad_norm": 7.302138328552246,
"learning_rate": 4.512228634240176e-05,
"loss": 0.2519,
"step": 760
},
{
"epoch": 0.57,
"grad_norm": 13.434986114501953,
"learning_rate": 4.498488595768068e-05,
"loss": 0.3062,
"step": 770
},
{
"epoch": 0.58,
"grad_norm": 6.166885852813721,
"learning_rate": 4.484748557295961e-05,
"loss": 0.1895,
"step": 780
},
{
"epoch": 0.59,
"grad_norm": 11.351304054260254,
"learning_rate": 4.4710085188238534e-05,
"loss": 0.2362,
"step": 790
},
{
"epoch": 0.59,
"grad_norm": 7.459861755371094,
"learning_rate": 4.4572684803517453e-05,
"loss": 0.183,
"step": 800
},
{
"epoch": 0.6,
"grad_norm": 11.746100425720215,
"learning_rate": 4.443528441879637e-05,
"loss": 0.2394,
"step": 810
},
{
"epoch": 0.61,
"grad_norm": 14.919011116027832,
"learning_rate": 4.42978840340753e-05,
"loss": 0.2555,
"step": 820
},
{
"epoch": 0.62,
"grad_norm": 16.357730865478516,
"learning_rate": 4.4160483649354226e-05,
"loss": 0.2135,
"step": 830
},
{
"epoch": 0.62,
"grad_norm": 16.165504455566406,
"learning_rate": 4.402308326463314e-05,
"loss": 0.2839,
"step": 840
},
{
"epoch": 0.63,
"grad_norm": 11.526119232177734,
"learning_rate": 4.3885682879912066e-05,
"loss": 0.2261,
"step": 850
},
{
"epoch": 0.64,
"grad_norm": 9.917882919311523,
"learning_rate": 4.374828249519099e-05,
"loss": 0.2047,
"step": 860
},
{
"epoch": 0.65,
"grad_norm": 8.00707721710205,
"learning_rate": 4.361088211046991e-05,
"loss": 0.2211,
"step": 870
},
{
"epoch": 0.65,
"grad_norm": 13.628582000732422,
"learning_rate": 4.347348172574883e-05,
"loss": 0.2352,
"step": 880
},
{
"epoch": 0.66,
"grad_norm": 16.522411346435547,
"learning_rate": 4.333608134102776e-05,
"loss": 0.2686,
"step": 890
},
{
"epoch": 0.67,
"grad_norm": 10.696971893310547,
"learning_rate": 4.319868095630668e-05,
"loss": 0.2304,
"step": 900
},
{
"epoch": 0.68,
"grad_norm": 15.84067440032959,
"learning_rate": 4.3061280571585604e-05,
"loss": 0.2011,
"step": 910
},
{
"epoch": 0.68,
"grad_norm": 5.352001667022705,
"learning_rate": 4.2923880186864524e-05,
"loss": 0.2443,
"step": 920
},
{
"epoch": 0.69,
"grad_norm": 5.072910308837891,
"learning_rate": 4.278647980214345e-05,
"loss": 0.2372,
"step": 930
},
{
"epoch": 0.7,
"grad_norm": 9.378905296325684,
"learning_rate": 4.264907941742237e-05,
"loss": 0.2207,
"step": 940
},
{
"epoch": 0.7,
"grad_norm": 17.055631637573242,
"learning_rate": 4.25116790327013e-05,
"loss": 0.218,
"step": 950
},
{
"epoch": 0.71,
"grad_norm": 9.726360321044922,
"learning_rate": 4.2374278647980216e-05,
"loss": 0.1867,
"step": 960
},
{
"epoch": 0.72,
"grad_norm": 5.339004993438721,
"learning_rate": 4.2236878263259136e-05,
"loss": 0.2321,
"step": 970
},
{
"epoch": 0.73,
"grad_norm": 16.372512817382812,
"learning_rate": 4.209947787853806e-05,
"loss": 0.1977,
"step": 980
},
{
"epoch": 0.73,
"grad_norm": 6.280429363250732,
"learning_rate": 4.196207749381699e-05,
"loss": 0.2268,
"step": 990
},
{
"epoch": 0.74,
"grad_norm": 9.613251686096191,
"learning_rate": 4.18246771090959e-05,
"loss": 0.2157,
"step": 1000
},
{
"epoch": 0.75,
"grad_norm": 7.933209419250488,
"learning_rate": 4.168727672437483e-05,
"loss": 0.1986,
"step": 1010
},
{
"epoch": 0.76,
"grad_norm": 7.31367301940918,
"learning_rate": 4.1549876339653755e-05,
"loss": 0.1877,
"step": 1020
},
{
"epoch": 0.76,
"grad_norm": 30.09975814819336,
"learning_rate": 4.1412475954932675e-05,
"loss": 0.2361,
"step": 1030
},
{
"epoch": 0.77,
"grad_norm": 6.245104789733887,
"learning_rate": 4.1275075570211595e-05,
"loss": 0.2039,
"step": 1040
},
{
"epoch": 0.78,
"grad_norm": 7.538546085357666,
"learning_rate": 4.113767518549052e-05,
"loss": 0.1956,
"step": 1050
},
{
"epoch": 0.79,
"grad_norm": 10.740853309631348,
"learning_rate": 4.100027480076945e-05,
"loss": 0.2041,
"step": 1060
},
{
"epoch": 0.79,
"grad_norm": 6.043615341186523,
"learning_rate": 4.086287441604837e-05,
"loss": 0.1999,
"step": 1070
},
{
"epoch": 0.8,
"grad_norm": 8.885452270507812,
"learning_rate": 4.072547403132729e-05,
"loss": 0.1876,
"step": 1080
},
{
"epoch": 0.81,
"grad_norm": 9.384923934936523,
"learning_rate": 4.0588073646606214e-05,
"loss": 0.1829,
"step": 1090
},
{
"epoch": 0.82,
"grad_norm": 11.624629020690918,
"learning_rate": 4.045067326188513e-05,
"loss": 0.2287,
"step": 1100
},
{
"epoch": 0.82,
"grad_norm": 8.505549430847168,
"learning_rate": 4.031327287716406e-05,
"loss": 0.208,
"step": 1110
},
{
"epoch": 0.83,
"grad_norm": 14.19863224029541,
"learning_rate": 4.017587249244298e-05,
"loss": 0.199,
"step": 1120
},
{
"epoch": 0.84,
"grad_norm": 14.460047721862793,
"learning_rate": 4.0038472107721906e-05,
"loss": 0.2214,
"step": 1130
},
{
"epoch": 0.85,
"grad_norm": 7.287010669708252,
"learning_rate": 3.9901071723000826e-05,
"loss": 0.2268,
"step": 1140
},
{
"epoch": 0.85,
"grad_norm": 8.882172584533691,
"learning_rate": 3.976367133827975e-05,
"loss": 0.183,
"step": 1150
},
{
"epoch": 0.86,
"grad_norm": 4.59747314453125,
"learning_rate": 3.962627095355867e-05,
"loss": 0.2066,
"step": 1160
},
{
"epoch": 0.87,
"grad_norm": 9.367794036865234,
"learning_rate": 3.948887056883759e-05,
"loss": 0.1719,
"step": 1170
},
{
"epoch": 0.88,
"grad_norm": 9.384147644042969,
"learning_rate": 3.935147018411652e-05,
"loss": 0.2052,
"step": 1180
},
{
"epoch": 0.88,
"grad_norm": 7.724942207336426,
"learning_rate": 3.9214069799395445e-05,
"loss": 0.2437,
"step": 1190
},
{
"epoch": 0.89,
"grad_norm": 7.259653568267822,
"learning_rate": 3.907666941467436e-05,
"loss": 0.2164,
"step": 1200
},
{
"epoch": 0.9,
"grad_norm": 11.061640739440918,
"learning_rate": 3.8939269029953284e-05,
"loss": 0.1728,
"step": 1210
},
{
"epoch": 0.91,
"grad_norm": 7.902336120605469,
"learning_rate": 3.880186864523221e-05,
"loss": 0.2218,
"step": 1220
},
{
"epoch": 0.91,
"grad_norm": 8.82502555847168,
"learning_rate": 3.866446826051113e-05,
"loss": 0.2196,
"step": 1230
},
{
"epoch": 0.92,
"grad_norm": 6.531546592712402,
"learning_rate": 3.852706787579005e-05,
"loss": 0.199,
"step": 1240
},
{
"epoch": 0.93,
"grad_norm": 10.291007995605469,
"learning_rate": 3.8389667491068977e-05,
"loss": 0.2116,
"step": 1250
},
{
"epoch": 0.93,
"grad_norm": 12.68818187713623,
"learning_rate": 3.82522671063479e-05,
"loss": 0.2001,
"step": 1260
},
{
"epoch": 0.94,
"grad_norm": 19.413272857666016,
"learning_rate": 3.811486672162682e-05,
"loss": 0.2016,
"step": 1270
},
{
"epoch": 0.95,
"grad_norm": 7.375454425811768,
"learning_rate": 3.797746633690574e-05,
"loss": 0.1815,
"step": 1280
},
{
"epoch": 0.96,
"grad_norm": 5.35936164855957,
"learning_rate": 3.784006595218467e-05,
"loss": 0.2009,
"step": 1290
},
{
"epoch": 0.96,
"grad_norm": 9.566622734069824,
"learning_rate": 3.770266556746359e-05,
"loss": 0.1757,
"step": 1300
},
{
"epoch": 0.97,
"grad_norm": 9.03822135925293,
"learning_rate": 3.7565265182742515e-05,
"loss": 0.2277,
"step": 1310
},
{
"epoch": 0.98,
"grad_norm": 6.241549015045166,
"learning_rate": 3.7427864798021435e-05,
"loss": 0.1884,
"step": 1320
},
{
"epoch": 0.99,
"grad_norm": 7.179203987121582,
"learning_rate": 3.7290464413300355e-05,
"loss": 0.1692,
"step": 1330
},
{
"epoch": 0.99,
"grad_norm": 5.8050312995910645,
"learning_rate": 3.715306402857928e-05,
"loss": 0.1821,
"step": 1340
},
{
"epoch": 1.0,
"eval_accuracy": 0.9464159430538971,
"eval_f1": 0.8738123839685487,
"eval_loss": 0.12856225669384003,
"eval_precision": 0.8953228152623923,
"eval_recall": 0.8533112935907007,
"eval_roc_auc": 0.9857682483198157,
"eval_runtime": 142.7117,
"eval_samples_per_second": 604.415,
"eval_steps_per_second": 37.782,
"step": 1348
},
{
"epoch": 1.0,
"grad_norm": 9.314454078674316,
"learning_rate": 3.701566364385821e-05,
"loss": 0.1924,
"step": 1350
},
{
"epoch": 1.01,
"grad_norm": 5.798178672790527,
"learning_rate": 3.687826325913713e-05,
"loss": 0.1764,
"step": 1360
},
{
"epoch": 1.02,
"grad_norm": 12.371926307678223,
"learning_rate": 3.674086287441605e-05,
"loss": 0.1771,
"step": 1370
},
{
"epoch": 1.02,
"grad_norm": 7.354398727416992,
"learning_rate": 3.6603462489694974e-05,
"loss": 0.1631,
"step": 1380
},
{
"epoch": 1.03,
"grad_norm": 15.515620231628418,
"learning_rate": 3.64660621049739e-05,
"loss": 0.176,
"step": 1390
},
{
"epoch": 1.04,
"grad_norm": 8.227542877197266,
"learning_rate": 3.632866172025282e-05,
"loss": 0.1846,
"step": 1400
},
{
"epoch": 1.05,
"grad_norm": 6.485056400299072,
"learning_rate": 3.619126133553174e-05,
"loss": 0.1648,
"step": 1410
},
{
"epoch": 1.05,
"grad_norm": 8.542535781860352,
"learning_rate": 3.6053860950810666e-05,
"loss": 0.2062,
"step": 1420
},
{
"epoch": 1.06,
"grad_norm": 7.3602752685546875,
"learning_rate": 3.5916460566089586e-05,
"loss": 0.2463,
"step": 1430
},
{
"epoch": 1.07,
"grad_norm": 11.33146858215332,
"learning_rate": 3.577906018136851e-05,
"loss": 0.1959,
"step": 1440
},
{
"epoch": 1.08,
"grad_norm": 7.324457168579102,
"learning_rate": 3.564165979664743e-05,
"loss": 0.1607,
"step": 1450
},
{
"epoch": 1.08,
"grad_norm": 19.9168758392334,
"learning_rate": 3.550425941192635e-05,
"loss": 0.1749,
"step": 1460
},
{
"epoch": 1.09,
"grad_norm": 7.351601600646973,
"learning_rate": 3.536685902720528e-05,
"loss": 0.1478,
"step": 1470
},
{
"epoch": 1.1,
"grad_norm": 12.877877235412598,
"learning_rate": 3.5229458642484205e-05,
"loss": 0.2172,
"step": 1480
},
{
"epoch": 1.11,
"grad_norm": 11.349455833435059,
"learning_rate": 3.5092058257763125e-05,
"loss": 0.175,
"step": 1490
},
{
"epoch": 1.11,
"grad_norm": 8.51469898223877,
"learning_rate": 3.4954657873042044e-05,
"loss": 0.2063,
"step": 1500
},
{
"epoch": 1.12,
"grad_norm": 8.571216583251953,
"learning_rate": 3.481725748832097e-05,
"loss": 0.1753,
"step": 1510
},
{
"epoch": 1.13,
"grad_norm": 3.8904285430908203,
"learning_rate": 3.46798571035999e-05,
"loss": 0.1534,
"step": 1520
},
{
"epoch": 1.14,
"grad_norm": 14.162818908691406,
"learning_rate": 3.454245671887881e-05,
"loss": 0.2191,
"step": 1530
},
{
"epoch": 1.14,
"grad_norm": 19.316482543945312,
"learning_rate": 3.440505633415774e-05,
"loss": 0.1703,
"step": 1540
},
{
"epoch": 1.15,
"grad_norm": 5.541199207305908,
"learning_rate": 3.426765594943666e-05,
"loss": 0.1666,
"step": 1550
},
{
"epoch": 1.16,
"grad_norm": 6.734847545623779,
"learning_rate": 3.413025556471558e-05,
"loss": 0.1819,
"step": 1560
},
{
"epoch": 1.16,
"grad_norm": 9.651701927185059,
"learning_rate": 3.39928551799945e-05,
"loss": 0.185,
"step": 1570
},
{
"epoch": 1.17,
"grad_norm": 6.829617977142334,
"learning_rate": 3.385545479527343e-05,
"loss": 0.1486,
"step": 1580
},
{
"epoch": 1.18,
"grad_norm": 9.438749313354492,
"learning_rate": 3.371805441055235e-05,
"loss": 0.1862,
"step": 1590
},
{
"epoch": 1.19,
"grad_norm": 4.862937927246094,
"learning_rate": 3.3580654025831275e-05,
"loss": 0.2044,
"step": 1600
},
{
"epoch": 1.19,
"grad_norm": 9.02552604675293,
"learning_rate": 3.3443253641110195e-05,
"loss": 0.147,
"step": 1610
},
{
"epoch": 1.2,
"grad_norm": 7.10019588470459,
"learning_rate": 3.330585325638912e-05,
"loss": 0.1426,
"step": 1620
},
{
"epoch": 1.21,
"grad_norm": 7.72890043258667,
"learning_rate": 3.316845287166804e-05,
"loss": 0.1247,
"step": 1630
},
{
"epoch": 1.22,
"grad_norm": 3.0632925033569336,
"learning_rate": 3.303105248694697e-05,
"loss": 0.1354,
"step": 1640
},
{
"epoch": 1.22,
"grad_norm": 13.587533950805664,
"learning_rate": 3.289365210222589e-05,
"loss": 0.1073,
"step": 1650
},
{
"epoch": 1.23,
"grad_norm": 8.919532775878906,
"learning_rate": 3.275625171750481e-05,
"loss": 0.1953,
"step": 1660
},
{
"epoch": 1.24,
"grad_norm": 13.225985527038574,
"learning_rate": 3.2618851332783734e-05,
"loss": 0.1787,
"step": 1670
},
{
"epoch": 1.25,
"grad_norm": 6.826201438903809,
"learning_rate": 3.248145094806266e-05,
"loss": 0.1469,
"step": 1680
},
{
"epoch": 1.25,
"grad_norm": 10.931801795959473,
"learning_rate": 3.234405056334158e-05,
"loss": 0.1516,
"step": 1690
},
{
"epoch": 1.26,
"grad_norm": 12.377479553222656,
"learning_rate": 3.22066501786205e-05,
"loss": 0.1718,
"step": 1700
},
{
"epoch": 1.27,
"grad_norm": 13.63291072845459,
"learning_rate": 3.2069249793899426e-05,
"loss": 0.1618,
"step": 1710
},
{
"epoch": 1.28,
"grad_norm": 9.691218376159668,
"learning_rate": 3.193184940917835e-05,
"loss": 0.1993,
"step": 1720
},
{
"epoch": 1.28,
"grad_norm": 7.020012378692627,
"learning_rate": 3.1794449024457266e-05,
"loss": 0.1694,
"step": 1730
},
{
"epoch": 1.29,
"grad_norm": 6.615390777587891,
"learning_rate": 3.165704863973619e-05,
"loss": 0.1456,
"step": 1740
},
{
"epoch": 1.3,
"grad_norm": 9.857015609741211,
"learning_rate": 3.151964825501512e-05,
"loss": 0.1678,
"step": 1750
},
{
"epoch": 1.31,
"grad_norm": 8.324471473693848,
"learning_rate": 3.138224787029404e-05,
"loss": 0.1451,
"step": 1760
},
{
"epoch": 1.31,
"grad_norm": 12.80250072479248,
"learning_rate": 3.124484748557296e-05,
"loss": 0.1635,
"step": 1770
},
{
"epoch": 1.32,
"grad_norm": 6.665610313415527,
"learning_rate": 3.1107447100851885e-05,
"loss": 0.1064,
"step": 1780
},
{
"epoch": 1.33,
"grad_norm": 10.786760330200195,
"learning_rate": 3.0970046716130804e-05,
"loss": 0.1712,
"step": 1790
},
{
"epoch": 1.34,
"grad_norm": 7.054905414581299,
"learning_rate": 3.083264633140973e-05,
"loss": 0.1287,
"step": 1800
},
{
"epoch": 1.34,
"grad_norm": 7.06713342666626,
"learning_rate": 3.069524594668865e-05,
"loss": 0.148,
"step": 1810
},
{
"epoch": 1.35,
"grad_norm": 6.280238628387451,
"learning_rate": 3.055784556196758e-05,
"loss": 0.1451,
"step": 1820
},
{
"epoch": 1.36,
"grad_norm": 5.174367904663086,
"learning_rate": 3.0420445177246497e-05,
"loss": 0.1261,
"step": 1830
},
{
"epoch": 1.36,
"grad_norm": 9.55992317199707,
"learning_rate": 3.0283044792525423e-05,
"loss": 0.1895,
"step": 1840
},
{
"epoch": 1.37,
"grad_norm": 8.73131275177002,
"learning_rate": 3.014564440780434e-05,
"loss": 0.1729,
"step": 1850
},
{
"epoch": 1.38,
"grad_norm": 4.344249248504639,
"learning_rate": 3.0008244023083266e-05,
"loss": 0.1536,
"step": 1860
},
{
"epoch": 1.39,
"grad_norm": 7.8159332275390625,
"learning_rate": 2.987084363836219e-05,
"loss": 0.1071,
"step": 1870
},
{
"epoch": 1.39,
"grad_norm": 10.302350044250488,
"learning_rate": 2.9733443253641112e-05,
"loss": 0.1438,
"step": 1880
},
{
"epoch": 1.4,
"grad_norm": 8.760666847229004,
"learning_rate": 2.9596042868920032e-05,
"loss": 0.1614,
"step": 1890
},
{
"epoch": 1.41,
"grad_norm": 9.023147583007812,
"learning_rate": 2.9458642484198955e-05,
"loss": 0.1227,
"step": 1900
},
{
"epoch": 1.42,
"grad_norm": 7.698544502258301,
"learning_rate": 2.932124209947788e-05,
"loss": 0.1524,
"step": 1910
},
{
"epoch": 1.42,
"grad_norm": 9.56216049194336,
"learning_rate": 2.9183841714756805e-05,
"loss": 0.1292,
"step": 1920
},
{
"epoch": 1.43,
"grad_norm": 2.997666358947754,
"learning_rate": 2.9046441330035725e-05,
"loss": 0.1173,
"step": 1930
},
{
"epoch": 1.44,
"grad_norm": 20.892711639404297,
"learning_rate": 2.8909040945314648e-05,
"loss": 0.1681,
"step": 1940
},
{
"epoch": 1.45,
"grad_norm": 10.03374195098877,
"learning_rate": 2.877164056059357e-05,
"loss": 0.1412,
"step": 1950
},
{
"epoch": 1.45,
"grad_norm": 11.981986999511719,
"learning_rate": 2.8634240175872494e-05,
"loss": 0.1266,
"step": 1960
},
{
"epoch": 1.46,
"grad_norm": 6.160180568695068,
"learning_rate": 2.8496839791151414e-05,
"loss": 0.1216,
"step": 1970
},
{
"epoch": 1.47,
"grad_norm": 6.978806495666504,
"learning_rate": 2.8359439406430337e-05,
"loss": 0.1488,
"step": 1980
},
{
"epoch": 1.48,
"grad_norm": 8.241849899291992,
"learning_rate": 2.8222039021709263e-05,
"loss": 0.164,
"step": 1990
},
{
"epoch": 1.48,
"grad_norm": 3.566959857940674,
"learning_rate": 2.8084638636988186e-05,
"loss": 0.1192,
"step": 2000
},
{
"epoch": 1.49,
"grad_norm": 6.145435333251953,
"learning_rate": 2.7947238252267106e-05,
"loss": 0.1467,
"step": 2010
},
{
"epoch": 1.5,
"grad_norm": 7.5334086418151855,
"learning_rate": 2.780983786754603e-05,
"loss": 0.1388,
"step": 2020
},
{
"epoch": 1.51,
"grad_norm": 6.36589241027832,
"learning_rate": 2.7672437482824952e-05,
"loss": 0.1536,
"step": 2030
},
{
"epoch": 1.51,
"grad_norm": 9.310737609863281,
"learning_rate": 2.753503709810388e-05,
"loss": 0.1215,
"step": 2040
},
{
"epoch": 1.52,
"grad_norm": 5.120606422424316,
"learning_rate": 2.7397636713382795e-05,
"loss": 0.1112,
"step": 2050
},
{
"epoch": 1.53,
"grad_norm": 10.128201484680176,
"learning_rate": 2.726023632866172e-05,
"loss": 0.1386,
"step": 2060
},
{
"epoch": 1.54,
"grad_norm": 19.47095489501953,
"learning_rate": 2.7122835943940645e-05,
"loss": 0.1353,
"step": 2070
},
{
"epoch": 1.54,
"grad_norm": 5.68997049331665,
"learning_rate": 2.6985435559219568e-05,
"loss": 0.1601,
"step": 2080
},
{
"epoch": 1.55,
"grad_norm": 19.490400314331055,
"learning_rate": 2.684803517449849e-05,
"loss": 0.1748,
"step": 2090
},
{
"epoch": 1.56,
"grad_norm": 4.7767791748046875,
"learning_rate": 2.671063478977741e-05,
"loss": 0.13,
"step": 2100
},
{
"epoch": 1.57,
"grad_norm": 6.445133686065674,
"learning_rate": 2.6573234405056334e-05,
"loss": 0.1422,
"step": 2110
},
{
"epoch": 1.57,
"grad_norm": 18.48575782775879,
"learning_rate": 2.643583402033526e-05,
"loss": 0.123,
"step": 2120
},
{
"epoch": 1.58,
"grad_norm": 6.95095157623291,
"learning_rate": 2.6298433635614183e-05,
"loss": 0.1519,
"step": 2130
},
{
"epoch": 1.59,
"grad_norm": 5.297643661499023,
"learning_rate": 2.6161033250893103e-05,
"loss": 0.1499,
"step": 2140
},
{
"epoch": 1.59,
"grad_norm": 7.358607292175293,
"learning_rate": 2.6023632866172026e-05,
"loss": 0.1397,
"step": 2150
},
{
"epoch": 1.6,
"grad_norm": 8.367630958557129,
"learning_rate": 2.588623248145095e-05,
"loss": 0.1642,
"step": 2160
},
{
"epoch": 1.61,
"grad_norm": 7.217466831207275,
"learning_rate": 2.5748832096729876e-05,
"loss": 0.1166,
"step": 2170
},
{
"epoch": 1.62,
"grad_norm": 8.88287353515625,
"learning_rate": 2.5611431712008792e-05,
"loss": 0.1645,
"step": 2180
},
{
"epoch": 1.62,
"grad_norm": 6.055432319641113,
"learning_rate": 2.547403132728772e-05,
"loss": 0.1317,
"step": 2190
},
{
"epoch": 1.63,
"grad_norm": 10.437047004699707,
"learning_rate": 2.5336630942566642e-05,
"loss": 0.1293,
"step": 2200
},
{
"epoch": 1.64,
"grad_norm": 11.063042640686035,
"learning_rate": 2.5199230557845565e-05,
"loss": 0.1148,
"step": 2210
},
{
"epoch": 1.65,
"grad_norm": 5.3282599449157715,
"learning_rate": 2.5061830173124485e-05,
"loss": 0.1242,
"step": 2220
},
{
"epoch": 1.65,
"grad_norm": 6.255503177642822,
"learning_rate": 2.4924429788403408e-05,
"loss": 0.1065,
"step": 2230
},
{
"epoch": 1.66,
"grad_norm": 10.385390281677246,
"learning_rate": 2.478702940368233e-05,
"loss": 0.158,
"step": 2240
},
{
"epoch": 1.67,
"grad_norm": 16.948211669921875,
"learning_rate": 2.4649629018961254e-05,
"loss": 0.1061,
"step": 2250
},
{
"epoch": 1.68,
"grad_norm": 13.97541618347168,
"learning_rate": 2.4512228634240177e-05,
"loss": 0.1213,
"step": 2260
},
{
"epoch": 1.68,
"grad_norm": 10.793522834777832,
"learning_rate": 2.43748282495191e-05,
"loss": 0.1225,
"step": 2270
},
{
"epoch": 1.69,
"grad_norm": 4.745506763458252,
"learning_rate": 2.4237427864798023e-05,
"loss": 0.1416,
"step": 2280
},
{
"epoch": 1.7,
"grad_norm": 4.898324012756348,
"learning_rate": 2.4100027480076946e-05,
"loss": 0.128,
"step": 2290
},
{
"epoch": 1.71,
"grad_norm": 11.247018814086914,
"learning_rate": 2.396262709535587e-05,
"loss": 0.1516,
"step": 2300
},
{
"epoch": 1.71,
"grad_norm": 4.54779577255249,
"learning_rate": 2.382522671063479e-05,
"loss": 0.1234,
"step": 2310
},
{
"epoch": 1.72,
"grad_norm": 23.603256225585938,
"learning_rate": 2.3687826325913716e-05,
"loss": 0.1493,
"step": 2320
},
{
"epoch": 1.73,
"grad_norm": 16.86369514465332,
"learning_rate": 2.3550425941192635e-05,
"loss": 0.1194,
"step": 2330
},
{
"epoch": 1.74,
"grad_norm": 15.160490989685059,
"learning_rate": 2.341302555647156e-05,
"loss": 0.1815,
"step": 2340
},
{
"epoch": 1.74,
"grad_norm": 7.883148670196533,
"learning_rate": 2.327562517175048e-05,
"loss": 0.1361,
"step": 2350
},
{
"epoch": 1.75,
"grad_norm": 7.599252700805664,
"learning_rate": 2.3138224787029405e-05,
"loss": 0.1069,
"step": 2360
},
{
"epoch": 1.76,
"grad_norm": 17.904321670532227,
"learning_rate": 2.3000824402308328e-05,
"loss": 0.1359,
"step": 2370
},
{
"epoch": 1.77,
"grad_norm": 8.08504581451416,
"learning_rate": 2.286342401758725e-05,
"loss": 0.1313,
"step": 2380
},
{
"epoch": 1.77,
"grad_norm": 5.088573455810547,
"learning_rate": 2.272602363286617e-05,
"loss": 0.0998,
"step": 2390
},
{
"epoch": 1.78,
"grad_norm": 10.284459114074707,
"learning_rate": 2.2588623248145097e-05,
"loss": 0.1156,
"step": 2400
},
{
"epoch": 1.79,
"grad_norm": 16.700557708740234,
"learning_rate": 2.2451222863424017e-05,
"loss": 0.1433,
"step": 2410
},
{
"epoch": 1.8,
"grad_norm": 10.368426322937012,
"learning_rate": 2.2313822478702943e-05,
"loss": 0.1465,
"step": 2420
},
{
"epoch": 1.8,
"grad_norm": 3.7450640201568604,
"learning_rate": 2.2176422093981863e-05,
"loss": 0.1323,
"step": 2430
},
{
"epoch": 1.81,
"grad_norm": 8.52283763885498,
"learning_rate": 2.2039021709260786e-05,
"loss": 0.1193,
"step": 2440
},
{
"epoch": 1.82,
"grad_norm": 4.979620456695557,
"learning_rate": 2.190162132453971e-05,
"loss": 0.1261,
"step": 2450
},
{
"epoch": 1.82,
"grad_norm": 10.779509544372559,
"learning_rate": 2.1764220939818633e-05,
"loss": 0.1335,
"step": 2460
},
{
"epoch": 1.83,
"grad_norm": 5.424500465393066,
"learning_rate": 2.1626820555097556e-05,
"loss": 0.1625,
"step": 2470
},
{
"epoch": 1.84,
"grad_norm": 5.80781364440918,
"learning_rate": 2.148942017037648e-05,
"loss": 0.1196,
"step": 2480
},
{
"epoch": 1.85,
"grad_norm": 4.908621788024902,
"learning_rate": 2.13520197856554e-05,
"loss": 0.1343,
"step": 2490
},
{
"epoch": 1.85,
"grad_norm": 8.16831111907959,
"learning_rate": 2.1214619400934325e-05,
"loss": 0.1032,
"step": 2500
},
{
"epoch": 1.86,
"grad_norm": 8.689203262329102,
"learning_rate": 2.1077219016213245e-05,
"loss": 0.129,
"step": 2510
},
{
"epoch": 1.87,
"grad_norm": 7.2602152824401855,
"learning_rate": 2.093981863149217e-05,
"loss": 0.0866,
"step": 2520
},
{
"epoch": 1.88,
"grad_norm": 6.816242218017578,
"learning_rate": 2.080241824677109e-05,
"loss": 0.1175,
"step": 2530
},
{
"epoch": 1.88,
"grad_norm": 8.618610382080078,
"learning_rate": 2.0665017862050014e-05,
"loss": 0.119,
"step": 2540
},
{
"epoch": 1.89,
"grad_norm": 7.9733428955078125,
"learning_rate": 2.0527617477328937e-05,
"loss": 0.143,
"step": 2550
},
{
"epoch": 1.9,
"grad_norm": 6.070068836212158,
"learning_rate": 2.039021709260786e-05,
"loss": 0.1493,
"step": 2560
},
{
"epoch": 1.91,
"grad_norm": 15.120218276977539,
"learning_rate": 2.0252816707886783e-05,
"loss": 0.1147,
"step": 2570
},
{
"epoch": 1.91,
"grad_norm": 9.828227043151855,
"learning_rate": 2.0115416323165706e-05,
"loss": 0.1111,
"step": 2580
},
{
"epoch": 1.92,
"grad_norm": 14.995918273925781,
"learning_rate": 1.9978015938444626e-05,
"loss": 0.1434,
"step": 2590
},
{
"epoch": 1.93,
"grad_norm": 9.751655578613281,
"learning_rate": 1.9840615553723553e-05,
"loss": 0.1142,
"step": 2600
},
{
"epoch": 1.94,
"grad_norm": 8.952950477600098,
"learning_rate": 1.9703215169002472e-05,
"loss": 0.1395,
"step": 2610
},
{
"epoch": 1.94,
"grad_norm": 5.2864179611206055,
"learning_rate": 1.9565814784281396e-05,
"loss": 0.1205,
"step": 2620
},
{
"epoch": 1.95,
"grad_norm": 6.446427345275879,
"learning_rate": 1.942841439956032e-05,
"loss": 0.1067,
"step": 2630
},
{
"epoch": 1.96,
"grad_norm": 14.247118949890137,
"learning_rate": 1.9291014014839242e-05,
"loss": 0.1142,
"step": 2640
},
{
"epoch": 1.97,
"grad_norm": 9.912723541259766,
"learning_rate": 1.9153613630118165e-05,
"loss": 0.1004,
"step": 2650
},
{
"epoch": 1.97,
"grad_norm": 12.7068452835083,
"learning_rate": 1.9016213245397088e-05,
"loss": 0.1474,
"step": 2660
},
{
"epoch": 1.98,
"grad_norm": 10.02779483795166,
"learning_rate": 1.887881286067601e-05,
"loss": 0.1092,
"step": 2670
},
{
"epoch": 1.99,
"grad_norm": 9.037574768066406,
"learning_rate": 1.8741412475954934e-05,
"loss": 0.1289,
"step": 2680
},
{
"epoch": 2.0,
"grad_norm": 8.411940574645996,
"learning_rate": 1.8604012091233854e-05,
"loss": 0.1333,
"step": 2690
},
{
"epoch": 2.0,
"eval_accuracy": 0.9724891892831886,
"eval_f1": 0.9351904956984842,
"eval_loss": 0.07147805392742157,
"eval_precision": 0.9585689491069929,
"eval_recall": 0.9129252426149088,
"eval_roc_auc": 0.9959651902460049,
"eval_runtime": 142.5207,
"eval_samples_per_second": 605.224,
"eval_steps_per_second": 37.833,
"step": 2696
},
{
"epoch": 2.0,
"grad_norm": 3.5268750190734863,
"learning_rate": 1.846661170651278e-05,
"loss": 0.097,
"step": 2700
},
{
"epoch": 2.01,
"grad_norm": 6.344755172729492,
"learning_rate": 1.83292113217917e-05,
"loss": 0.1026,
"step": 2710
},
{
"epoch": 2.02,
"grad_norm": 6.7241411209106445,
"learning_rate": 1.8191810937070623e-05,
"loss": 0.1087,
"step": 2720
},
{
"epoch": 2.03,
"grad_norm": 12.02433967590332,
"learning_rate": 1.805441055234955e-05,
"loss": 0.1222,
"step": 2730
},
{
"epoch": 2.03,
"grad_norm": 11.566658973693848,
"learning_rate": 1.791701016762847e-05,
"loss": 0.1126,
"step": 2740
},
{
"epoch": 2.04,
"grad_norm": 8.08702278137207,
"learning_rate": 1.7779609782907393e-05,
"loss": 0.0974,
"step": 2750
},
{
"epoch": 2.05,
"grad_norm": 8.12630558013916,
"learning_rate": 1.7642209398186316e-05,
"loss": 0.0968,
"step": 2760
},
{
"epoch": 2.05,
"grad_norm": 10.757040023803711,
"learning_rate": 1.750480901346524e-05,
"loss": 0.0907,
"step": 2770
},
{
"epoch": 2.06,
"grad_norm": 12.3939847946167,
"learning_rate": 1.7367408628744162e-05,
"loss": 0.101,
"step": 2780
},
{
"epoch": 2.07,
"grad_norm": 6.2936320304870605,
"learning_rate": 1.7230008244023085e-05,
"loss": 0.0963,
"step": 2790
},
{
"epoch": 2.08,
"grad_norm": 7.650388717651367,
"learning_rate": 1.7092607859302008e-05,
"loss": 0.1226,
"step": 2800
},
{
"epoch": 2.08,
"grad_norm": 2.8404643535614014,
"learning_rate": 1.695520747458093e-05,
"loss": 0.1024,
"step": 2810
},
{
"epoch": 2.09,
"grad_norm": 7.259866714477539,
"learning_rate": 1.681780708985985e-05,
"loss": 0.1107,
"step": 2820
},
{
"epoch": 2.1,
"grad_norm": 4.809945583343506,
"learning_rate": 1.6680406705138778e-05,
"loss": 0.0653,
"step": 2830
},
{
"epoch": 2.11,
"grad_norm": 11.47872257232666,
"learning_rate": 1.6543006320417697e-05,
"loss": 0.1182,
"step": 2840
},
{
"epoch": 2.11,
"grad_norm": 10.535073280334473,
"learning_rate": 1.640560593569662e-05,
"loss": 0.1149,
"step": 2850
},
{
"epoch": 2.12,
"grad_norm": 6.198659420013428,
"learning_rate": 1.6268205550975543e-05,
"loss": 0.0977,
"step": 2860
},
{
"epoch": 2.13,
"grad_norm": 12.449614524841309,
"learning_rate": 1.6130805166254467e-05,
"loss": 0.1003,
"step": 2870
},
{
"epoch": 2.14,
"grad_norm": 7.644033908843994,
"learning_rate": 1.599340478153339e-05,
"loss": 0.089,
"step": 2880
},
{
"epoch": 2.14,
"grad_norm": 13.20606803894043,
"learning_rate": 1.5856004396812313e-05,
"loss": 0.1022,
"step": 2890
},
{
"epoch": 2.15,
"grad_norm": 8.178515434265137,
"learning_rate": 1.5718604012091236e-05,
"loss": 0.119,
"step": 2900
},
{
"epoch": 2.16,
"grad_norm": 14.868145942687988,
"learning_rate": 1.558120362737016e-05,
"loss": 0.0741,
"step": 2910
},
{
"epoch": 2.17,
"grad_norm": 16.38283348083496,
"learning_rate": 1.544380324264908e-05,
"loss": 0.1044,
"step": 2920
},
{
"epoch": 2.17,
"grad_norm": 17.822145462036133,
"learning_rate": 1.5306402857928005e-05,
"loss": 0.1221,
"step": 2930
},
{
"epoch": 2.18,
"grad_norm": 6.547523021697998,
"learning_rate": 1.5169002473206925e-05,
"loss": 0.141,
"step": 2940
},
{
"epoch": 2.19,
"grad_norm": 7.0455241203308105,
"learning_rate": 1.503160208848585e-05,
"loss": 0.0543,
"step": 2950
},
{
"epoch": 2.2,
"grad_norm": 4.661613464355469,
"learning_rate": 1.4894201703764771e-05,
"loss": 0.0722,
"step": 2960
},
{
"epoch": 2.2,
"grad_norm": 11.065844535827637,
"learning_rate": 1.4756801319043694e-05,
"loss": 0.1073,
"step": 2970
},
{
"epoch": 2.21,
"grad_norm": 13.199024200439453,
"learning_rate": 1.4619400934322616e-05,
"loss": 0.1129,
"step": 2980
},
{
"epoch": 2.22,
"grad_norm": 11.305782318115234,
"learning_rate": 1.448200054960154e-05,
"loss": 0.0985,
"step": 2990
},
{
"epoch": 2.23,
"grad_norm": 7.05079460144043,
"learning_rate": 1.4344600164880462e-05,
"loss": 0.0877,
"step": 3000
},
{
"epoch": 2.23,
"grad_norm": 4.738026142120361,
"learning_rate": 1.4207199780159385e-05,
"loss": 0.0945,
"step": 3010
},
{
"epoch": 2.24,
"grad_norm": 5.532888889312744,
"learning_rate": 1.4069799395438306e-05,
"loss": 0.1042,
"step": 3020
},
{
"epoch": 2.25,
"grad_norm": 10.955171585083008,
"learning_rate": 1.3932399010717231e-05,
"loss": 0.0963,
"step": 3030
},
{
"epoch": 2.26,
"grad_norm": 9.255034446716309,
"learning_rate": 1.3794998625996153e-05,
"loss": 0.1216,
"step": 3040
},
{
"epoch": 2.26,
"grad_norm": 4.357778072357178,
"learning_rate": 1.3657598241275078e-05,
"loss": 0.1087,
"step": 3050
},
{
"epoch": 2.27,
"grad_norm": 9.293482780456543,
"learning_rate": 1.3520197856553999e-05,
"loss": 0.0875,
"step": 3060
},
{
"epoch": 2.28,
"grad_norm": 5.271756649017334,
"learning_rate": 1.3382797471832922e-05,
"loss": 0.0885,
"step": 3070
},
{
"epoch": 2.28,
"grad_norm": 9.91033935546875,
"learning_rate": 1.3245397087111843e-05,
"loss": 0.0788,
"step": 3080
},
{
"epoch": 2.29,
"grad_norm": 9.211901664733887,
"learning_rate": 1.3107996702390768e-05,
"loss": 0.0856,
"step": 3090
},
{
"epoch": 2.3,
"grad_norm": 16.67748260498047,
"learning_rate": 1.297059631766969e-05,
"loss": 0.0816,
"step": 3100
},
{
"epoch": 2.31,
"grad_norm": 11.928816795349121,
"learning_rate": 1.2833195932948613e-05,
"loss": 0.119,
"step": 3110
},
{
"epoch": 2.31,
"grad_norm": 10.408892631530762,
"learning_rate": 1.2695795548227534e-05,
"loss": 0.0986,
"step": 3120
},
{
"epoch": 2.32,
"grad_norm": 8.842406272888184,
"learning_rate": 1.2558395163506459e-05,
"loss": 0.0993,
"step": 3130
},
{
"epoch": 2.33,
"grad_norm": 7.2468085289001465,
"learning_rate": 1.2420994778785382e-05,
"loss": 0.0858,
"step": 3140
},
{
"epoch": 2.34,
"grad_norm": 6.119896411895752,
"learning_rate": 1.2283594394064305e-05,
"loss": 0.0874,
"step": 3150
},
{
"epoch": 2.34,
"grad_norm": 12.349807739257812,
"learning_rate": 1.2146194009343227e-05,
"loss": 0.1152,
"step": 3160
},
{
"epoch": 2.35,
"grad_norm": 7.183751106262207,
"learning_rate": 1.200879362462215e-05,
"loss": 0.0789,
"step": 3170
},
{
"epoch": 2.36,
"grad_norm": 10.576013565063477,
"learning_rate": 1.1871393239901073e-05,
"loss": 0.0909,
"step": 3180
},
{
"epoch": 2.37,
"grad_norm": 15.670488357543945,
"learning_rate": 1.1733992855179996e-05,
"loss": 0.0748,
"step": 3190
},
{
"epoch": 2.37,
"grad_norm": 3.484851360321045,
"learning_rate": 1.1596592470458917e-05,
"loss": 0.0883,
"step": 3200
},
{
"epoch": 2.38,
"grad_norm": 2.3166630268096924,
"learning_rate": 1.145919208573784e-05,
"loss": 0.0913,
"step": 3210
},
{
"epoch": 2.39,
"grad_norm": 11.654047012329102,
"learning_rate": 1.1321791701016764e-05,
"loss": 0.0752,
"step": 3220
},
{
"epoch": 2.4,
"grad_norm": 13.143092155456543,
"learning_rate": 1.1184391316295687e-05,
"loss": 0.1091,
"step": 3230
},
{
"epoch": 2.4,
"grad_norm": 11.333166122436523,
"learning_rate": 1.104699093157461e-05,
"loss": 0.0892,
"step": 3240
},
{
"epoch": 2.41,
"grad_norm": 15.304953575134277,
"learning_rate": 1.0909590546853531e-05,
"loss": 0.0945,
"step": 3250
},
{
"epoch": 2.42,
"grad_norm": 3.367475986480713,
"learning_rate": 1.0772190162132454e-05,
"loss": 0.0916,
"step": 3260
},
{
"epoch": 2.43,
"grad_norm": 10.725967407226562,
"learning_rate": 1.0634789777411378e-05,
"loss": 0.1032,
"step": 3270
},
{
"epoch": 2.43,
"grad_norm": 8.29692268371582,
"learning_rate": 1.04973893926903e-05,
"loss": 0.0882,
"step": 3280
},
{
"epoch": 2.44,
"grad_norm": 4.417304515838623,
"learning_rate": 1.0359989007969224e-05,
"loss": 0.0946,
"step": 3290
},
{
"epoch": 2.45,
"grad_norm": 4.891964435577393,
"learning_rate": 1.0222588623248145e-05,
"loss": 0.0796,
"step": 3300
},
{
"epoch": 2.46,
"grad_norm": 11.540854454040527,
"learning_rate": 1.0085188238527068e-05,
"loss": 0.1169,
"step": 3310
},
{
"epoch": 2.46,
"grad_norm": 7.2680768966674805,
"learning_rate": 9.947787853805991e-06,
"loss": 0.0782,
"step": 3320
},
{
"epoch": 2.47,
"grad_norm": 8.265511512756348,
"learning_rate": 9.810387469084915e-06,
"loss": 0.0789,
"step": 3330
},
{
"epoch": 2.48,
"grad_norm": 3.2101237773895264,
"learning_rate": 9.672987084363836e-06,
"loss": 0.1077,
"step": 3340
},
{
"epoch": 2.49,
"grad_norm": 4.780531406402588,
"learning_rate": 9.535586699642759e-06,
"loss": 0.0965,
"step": 3350
},
{
"epoch": 2.49,
"grad_norm": 5.388641834259033,
"learning_rate": 9.398186314921682e-06,
"loss": 0.0934,
"step": 3360
},
{
"epoch": 2.5,
"grad_norm": 17.17147445678711,
"learning_rate": 9.260785930200605e-06,
"loss": 0.0927,
"step": 3370
},
{
"epoch": 2.51,
"grad_norm": 4.758168697357178,
"learning_rate": 9.123385545479528e-06,
"loss": 0.0823,
"step": 3380
},
{
"epoch": 2.51,
"grad_norm": 11.259721755981445,
"learning_rate": 8.98598516075845e-06,
"loss": 0.0865,
"step": 3390
},
{
"epoch": 2.52,
"grad_norm": 6.561680316925049,
"learning_rate": 8.848584776037373e-06,
"loss": 0.099,
"step": 3400
},
{
"epoch": 2.53,
"grad_norm": 3.256338357925415,
"learning_rate": 8.711184391316296e-06,
"loss": 0.0832,
"step": 3410
},
{
"epoch": 2.54,
"grad_norm": 14.987279891967773,
"learning_rate": 8.573784006595219e-06,
"loss": 0.1017,
"step": 3420
},
{
"epoch": 2.54,
"grad_norm": 5.558447360992432,
"learning_rate": 8.436383621874142e-06,
"loss": 0.088,
"step": 3430
},
{
"epoch": 2.55,
"grad_norm": 13.000879287719727,
"learning_rate": 8.298983237153064e-06,
"loss": 0.1042,
"step": 3440
},
{
"epoch": 2.56,
"grad_norm": 7.873579502105713,
"learning_rate": 8.161582852431987e-06,
"loss": 0.0852,
"step": 3450
},
{
"epoch": 2.57,
"grad_norm": 2.538588047027588,
"learning_rate": 8.02418246771091e-06,
"loss": 0.0754,
"step": 3460
},
{
"epoch": 2.57,
"grad_norm": 8.97366714477539,
"learning_rate": 7.886782082989833e-06,
"loss": 0.0961,
"step": 3470
},
{
"epoch": 2.58,
"grad_norm": 9.575042724609375,
"learning_rate": 7.749381698268756e-06,
"loss": 0.0809,
"step": 3480
},
{
"epoch": 2.59,
"grad_norm": 12.21373462677002,
"learning_rate": 7.611981313547678e-06,
"loss": 0.1128,
"step": 3490
},
{
"epoch": 2.6,
"grad_norm": 5.540073871612549,
"learning_rate": 7.474580928826601e-06,
"loss": 0.072,
"step": 3500
},
{
"epoch": 2.6,
"grad_norm": 3.4904701709747314,
"learning_rate": 7.337180544105524e-06,
"loss": 0.0678,
"step": 3510
},
{
"epoch": 2.61,
"grad_norm": 14.816420555114746,
"learning_rate": 7.199780159384446e-06,
"loss": 0.1064,
"step": 3520
},
{
"epoch": 2.62,
"grad_norm": 15.36472225189209,
"learning_rate": 7.062379774663369e-06,
"loss": 0.0922,
"step": 3530
},
{
"epoch": 2.63,
"grad_norm": 19.16846466064453,
"learning_rate": 6.924979389942292e-06,
"loss": 0.0858,
"step": 3540
},
{
"epoch": 2.63,
"grad_norm": 9.295853614807129,
"learning_rate": 6.7875790052212145e-06,
"loss": 0.0935,
"step": 3550
},
{
"epoch": 2.64,
"grad_norm": 2.3949246406555176,
"learning_rate": 6.650178620500138e-06,
"loss": 0.0608,
"step": 3560
},
{
"epoch": 2.65,
"grad_norm": 4.4750471115112305,
"learning_rate": 6.51277823577906e-06,
"loss": 0.0768,
"step": 3570
},
{
"epoch": 2.66,
"grad_norm": 11.135028839111328,
"learning_rate": 6.375377851057983e-06,
"loss": 0.1067,
"step": 3580
},
{
"epoch": 2.66,
"grad_norm": 5.988401889801025,
"learning_rate": 6.237977466336906e-06,
"loss": 0.0963,
"step": 3590
},
{
"epoch": 2.67,
"grad_norm": 3.6631855964660645,
"learning_rate": 6.100577081615829e-06,
"loss": 0.067,
"step": 3600
},
{
"epoch": 2.68,
"grad_norm": 5.774590492248535,
"learning_rate": 5.9631766968947515e-06,
"loss": 0.0955,
"step": 3610
},
{
"epoch": 2.69,
"grad_norm": 8.615982055664062,
"learning_rate": 5.825776312173675e-06,
"loss": 0.1108,
"step": 3620
},
{
"epoch": 2.69,
"grad_norm": 7.245334148406982,
"learning_rate": 5.688375927452598e-06,
"loss": 0.0595,
"step": 3630
},
{
"epoch": 2.7,
"grad_norm": 9.387351989746094,
"learning_rate": 5.55097554273152e-06,
"loss": 0.0988,
"step": 3640
},
{
"epoch": 2.71,
"grad_norm": 14.46756362915039,
"learning_rate": 5.413575158010443e-06,
"loss": 0.0855,
"step": 3650
},
{
"epoch": 2.72,
"grad_norm": 9.338837623596191,
"learning_rate": 5.276174773289365e-06,
"loss": 0.0974,
"step": 3660
},
{
"epoch": 2.72,
"grad_norm": 11.95490837097168,
"learning_rate": 5.1387743885682885e-06,
"loss": 0.1149,
"step": 3670
},
{
"epoch": 2.73,
"grad_norm": 7.305751323699951,
"learning_rate": 5.001374003847211e-06,
"loss": 0.0987,
"step": 3680
},
{
"epoch": 2.74,
"grad_norm": 4.981101036071777,
"learning_rate": 4.863973619126134e-06,
"loss": 0.076,
"step": 3690
},
{
"epoch": 2.74,
"grad_norm": 6.774673938751221,
"learning_rate": 4.726573234405057e-06,
"loss": 0.0722,
"step": 3700
},
{
"epoch": 2.75,
"grad_norm": 9.900213241577148,
"learning_rate": 4.589172849683979e-06,
"loss": 0.0593,
"step": 3710
},
{
"epoch": 2.76,
"grad_norm": 8.229888916015625,
"learning_rate": 4.451772464962902e-06,
"loss": 0.0665,
"step": 3720
},
{
"epoch": 2.77,
"grad_norm": 10.979117393493652,
"learning_rate": 4.314372080241825e-06,
"loss": 0.0993,
"step": 3730
},
{
"epoch": 2.77,
"grad_norm": 6.245596408843994,
"learning_rate": 4.176971695520748e-06,
"loss": 0.0758,
"step": 3740
},
{
"epoch": 2.78,
"grad_norm": 9.754047393798828,
"learning_rate": 4.03957131079967e-06,
"loss": 0.119,
"step": 3750
},
{
"epoch": 2.79,
"grad_norm": 15.147882461547852,
"learning_rate": 3.902170926078593e-06,
"loss": 0.095,
"step": 3760
},
{
"epoch": 2.8,
"grad_norm": 10.091602325439453,
"learning_rate": 3.7647705413575158e-06,
"loss": 0.0766,
"step": 3770
},
{
"epoch": 2.8,
"grad_norm": 16.6304931640625,
"learning_rate": 3.6273701566364385e-06,
"loss": 0.0841,
"step": 3780
},
{
"epoch": 2.81,
"grad_norm": 5.442141532897949,
"learning_rate": 3.489969771915361e-06,
"loss": 0.0974,
"step": 3790
},
{
"epoch": 2.82,
"grad_norm": 4.17081880569458,
"learning_rate": 3.3525693871942843e-06,
"loss": 0.0618,
"step": 3800
},
{
"epoch": 2.83,
"grad_norm": 8.243993759155273,
"learning_rate": 3.215169002473207e-06,
"loss": 0.0676,
"step": 3810
},
{
"epoch": 2.83,
"grad_norm": 8.13919734954834,
"learning_rate": 3.07776861775213e-06,
"loss": 0.0572,
"step": 3820
},
{
"epoch": 2.84,
"grad_norm": 4.752624988555908,
"learning_rate": 2.9403682330310528e-06,
"loss": 0.071,
"step": 3830
},
{
"epoch": 2.85,
"grad_norm": 10.80265998840332,
"learning_rate": 2.8029678483099755e-06,
"loss": 0.0994,
"step": 3840
},
{
"epoch": 2.86,
"grad_norm": 9.523606300354004,
"learning_rate": 2.665567463588898e-06,
"loss": 0.0773,
"step": 3850
},
{
"epoch": 2.86,
"grad_norm": 6.865480422973633,
"learning_rate": 2.528167078867821e-06,
"loss": 0.0533,
"step": 3860
},
{
"epoch": 2.87,
"grad_norm": 7.947544574737549,
"learning_rate": 2.390766694146744e-06,
"loss": 0.0857,
"step": 3870
},
{
"epoch": 2.88,
"grad_norm": 1.29622220993042,
"learning_rate": 2.2533663094256666e-06,
"loss": 0.0919,
"step": 3880
},
{
"epoch": 2.89,
"grad_norm": 13.731074333190918,
"learning_rate": 2.1159659247045893e-06,
"loss": 0.0733,
"step": 3890
},
{
"epoch": 2.89,
"grad_norm": 16.332555770874023,
"learning_rate": 1.978565539983512e-06,
"loss": 0.0654,
"step": 3900
},
{
"epoch": 2.9,
"grad_norm": 9.119608879089355,
"learning_rate": 1.8411651552624347e-06,
"loss": 0.0704,
"step": 3910
},
{
"epoch": 2.91,
"grad_norm": 15.68105411529541,
"learning_rate": 1.7037647705413576e-06,
"loss": 0.0744,
"step": 3920
},
{
"epoch": 2.92,
"grad_norm": 12.381695747375488,
"learning_rate": 1.5663643858202803e-06,
"loss": 0.0707,
"step": 3930
},
{
"epoch": 2.92,
"grad_norm": 7.977994918823242,
"learning_rate": 1.4289640010992032e-06,
"loss": 0.0771,
"step": 3940
},
{
"epoch": 2.93,
"grad_norm": 8.164185523986816,
"learning_rate": 1.2915636163781259e-06,
"loss": 0.0545,
"step": 3950
},
{
"epoch": 2.94,
"grad_norm": 7.262919902801514,
"learning_rate": 1.1541632316570488e-06,
"loss": 0.0805,
"step": 3960
},
{
"epoch": 2.95,
"grad_norm": 11.631789207458496,
"learning_rate": 1.0167628469359715e-06,
"loss": 0.0906,
"step": 3970
},
{
"epoch": 2.95,
"grad_norm": 8.69687557220459,
"learning_rate": 8.793624622148942e-07,
"loss": 0.0788,
"step": 3980
},
{
"epoch": 2.96,
"grad_norm": 6.391763687133789,
"learning_rate": 7.419620774938171e-07,
"loss": 0.0767,
"step": 3990
},
{
"epoch": 2.97,
"grad_norm": 12.519146919250488,
"learning_rate": 6.045616927727397e-07,
"loss": 0.0805,
"step": 4000
},
{
"epoch": 2.97,
"grad_norm": 4.441882133483887,
"learning_rate": 4.6716130805166254e-07,
"loss": 0.0692,
"step": 4010
},
{
"epoch": 2.98,
"grad_norm": 7.949933052062988,
"learning_rate": 3.2976092333058533e-07,
"loss": 0.0671,
"step": 4020
},
{
"epoch": 2.99,
"grad_norm": 7.948129653930664,
"learning_rate": 1.9236053860950813e-07,
"loss": 0.0709,
"step": 4030
},
{
"epoch": 3.0,
"grad_norm": 2.767885208129883,
"learning_rate": 5.4960153888430885e-08,
"loss": 0.0809,
"step": 4040
},
{
"epoch": 3.0,
"eval_accuracy": 0.9803726074405555,
"eval_f1": 0.9539181795911702,
"eval_loss": 0.05200228467583656,
"eval_precision": 0.9743119266055046,
"eval_recall": 0.9343606697237923,
"eval_roc_auc": 0.997971591737313,
"eval_runtime": 144.0316,
"eval_samples_per_second": 598.875,
"eval_steps_per_second": 37.436,
"step": 4044
},
{
"epoch": 3.0,
"step": 4044,
"total_flos": 6.432009125858943e+18,
"train_loss": 0.18140344330571173,
"train_runtime": 1283.9356,
"train_samples_per_second": 201.545,
"train_steps_per_second": 3.15
}
],
"logging_steps": 10,
"max_steps": 4044,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 6.432009125858943e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}