{
  "best_metric": 0.8892018779342723,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-leukemia-08-2024.v1.1\\checkpoint-2187",
  "epoch": 9.984,
  "eval_steps": 500,
  "global_step": 3120,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.032, "grad_norm": 2.98797607421875, "learning_rate": 1.6025641025641025e-06, "loss": 0.712, "step": 10 },
    { "epoch": 0.064, "grad_norm": 6.277791976928711, "learning_rate": 3.205128205128205e-06, "loss": 0.6981, "step": 20 },
    { "epoch": 0.096, "grad_norm": 5.27135705947876, "learning_rate": 4.807692307692308e-06, "loss": 0.6783, "step": 30 },
    { "epoch": 0.128, "grad_norm": 6.831840991973877, "learning_rate": 6.41025641025641e-06, "loss": 0.6498, "step": 40 },
    { "epoch": 0.16, "grad_norm": 8.679888725280762, "learning_rate": 8.012820512820515e-06, "loss": 0.6079, "step": 50 },
    { "epoch": 0.192, "grad_norm": 11.260509490966797, "learning_rate": 9.615384615384616e-06, "loss": 0.579, "step": 60 },
    { "epoch": 0.224, "grad_norm": 17.14796257019043, "learning_rate": 1.1217948717948719e-05, "loss": 0.5673, "step": 70 },
    { "epoch": 0.256, "grad_norm": 10.239635467529297, "learning_rate": 1.282051282051282e-05, "loss": 0.5263, "step": 80 },
    { "epoch": 0.288, "grad_norm": 7.6730055809021, "learning_rate": 1.4423076923076923e-05, "loss": 0.5371, "step": 90 },
    { "epoch": 0.32, "grad_norm": 7.861423492431641, "learning_rate": 1.602564102564103e-05, "loss": 0.5144, "step": 100 },
    { "epoch": 0.352, "grad_norm": 9.051122665405273, "learning_rate": 1.762820512820513e-05, "loss": 0.5046, "step": 110 },
    { "epoch": 0.384, "grad_norm": 14.384810447692871, "learning_rate": 1.923076923076923e-05, "loss": 0.4784, "step": 120 },
    { "epoch": 0.416, "grad_norm": 12.508864402770996, "learning_rate": 2.0833333333333336e-05, "loss": 0.4626, "step": 130 },
    { "epoch": 0.448, "grad_norm": 14.830451011657715, "learning_rate": 2.2435897435897437e-05, "loss": 0.4459, "step": 140 },
    { "epoch": 0.48, "grad_norm": 52.42909622192383, "learning_rate": 2.4038461538461542e-05, "loss": 0.4163, "step": 150 },
    { "epoch": 0.512, "grad_norm": 32.466487884521484, "learning_rate": 2.564102564102564e-05, "loss": 0.4801, "step": 160 },
    { "epoch": 0.544, "grad_norm": 30.3638973236084, "learning_rate": 2.724358974358974e-05, "loss": 0.4896, "step": 170 },
    { "epoch": 0.576, "grad_norm": 27.437076568603516, "learning_rate": 2.8846153846153845e-05, "loss": 0.4598, "step": 180 },
    { "epoch": 0.608, "grad_norm": 11.979604721069336, "learning_rate": 3.0448717948717947e-05, "loss": 0.421, "step": 190 },
    { "epoch": 0.64, "grad_norm": 68.23617553710938, "learning_rate": 3.205128205128206e-05, "loss": 0.4518, "step": 200 },
    { "epoch": 0.672, "grad_norm": 41.465309143066406, "learning_rate": 3.365384615384616e-05, "loss": 0.4403, "step": 210 },
    { "epoch": 0.704, "grad_norm": 14.963610649108887, "learning_rate": 3.525641025641026e-05, "loss": 0.4282, "step": 220 },
    { "epoch": 0.736, "grad_norm": 16.748451232910156, "learning_rate": 3.685897435897436e-05, "loss": 0.3553, "step": 230 },
    { "epoch": 0.768, "grad_norm": 47.92767333984375, "learning_rate": 3.846153846153846e-05, "loss": 0.4068, "step": 240 },
    { "epoch": 0.8, "grad_norm": 32.47563552856445, "learning_rate": 4.006410256410257e-05, "loss": 0.4356, "step": 250 },
    { "epoch": 0.832, "grad_norm": 32.39358901977539, "learning_rate": 4.166666666666667e-05, "loss": 0.3873, "step": 260 },
    { "epoch": 0.864, "grad_norm": 17.5948429107666, "learning_rate": 4.326923076923077e-05, "loss": 0.3428, "step": 270 },
    { "epoch": 0.896, "grad_norm": 34.92612075805664, "learning_rate": 4.4871794871794874e-05, "loss": 0.4007, "step": 280 },
    { "epoch": 0.928, "grad_norm": 12.984992980957031, "learning_rate": 4.6474358974358976e-05, "loss": 0.4148, "step": 290 },
    { "epoch": 0.96, "grad_norm": 62.389808654785156, "learning_rate": 4.8076923076923084e-05, "loss": 0.3258, "step": 300 },
    { "epoch": 0.992, "grad_norm": 20.061573028564453, "learning_rate": 4.9679487179487185e-05, "loss": 0.32, "step": 310 },
    { "epoch": 0.9984, "eval_accuracy": 0.6910798122065728, "eval_loss": 0.9902069568634033, "eval_runtime": 15.0205, "eval_samples_per_second": 70.903, "eval_steps_per_second": 2.264, "step": 312 },
    { "epoch": 1.024, "grad_norm": 95.87650299072266, "learning_rate": 4.985754985754986e-05, "loss": 0.4095, "step": 320 },
    { "epoch": 1.056, "grad_norm": 15.716072082519531, "learning_rate": 4.9679487179487185e-05, "loss": 0.3434, "step": 330 },
    { "epoch": 1.088, "grad_norm": 24.56471061706543, "learning_rate": 4.95014245014245e-05, "loss": 0.3115, "step": 340 },
    { "epoch": 1.12, "grad_norm": 27.441110610961914, "learning_rate": 4.932336182336182e-05, "loss": 0.3818, "step": 350 },
    { "epoch": 1.152, "grad_norm": 19.63530921936035, "learning_rate": 4.9145299145299147e-05, "loss": 0.4001, "step": 360 },
    { "epoch": 1.184, "grad_norm": 59.710182189941406, "learning_rate": 4.896723646723647e-05, "loss": 0.3836, "step": 370 },
    { "epoch": 1.216, "grad_norm": 11.739165306091309, "learning_rate": 4.878917378917379e-05, "loss": 0.3505, "step": 380 },
    { "epoch": 1.248, "grad_norm": 9.130122184753418, "learning_rate": 4.8611111111111115e-05, "loss": 0.2724, "step": 390 },
    { "epoch": 1.28, "grad_norm": 35.71299362182617, "learning_rate": 4.8433048433048433e-05, "loss": 0.2992, "step": 400 },
    { "epoch": 1.312, "grad_norm": 23.9375, "learning_rate": 4.825498575498576e-05, "loss": 0.3046, "step": 410 },
    { "epoch": 1.3439999999999999, "grad_norm": 25.04848289489746, "learning_rate": 4.8076923076923084e-05, "loss": 0.315, "step": 420 },
    { "epoch": 1.376, "grad_norm": 18.383527755737305, "learning_rate": 4.78988603988604e-05, "loss": 0.3109, "step": 430 },
    { "epoch": 1.408, "grad_norm": 13.666149139404297, "learning_rate": 4.772079772079772e-05, "loss": 0.2999, "step": 440 },
    { "epoch": 1.44, "grad_norm": 9.764185905456543, "learning_rate": 4.7542735042735045e-05, "loss": 0.2615, "step": 450 },
    { "epoch": 1.472, "grad_norm": 15.155381202697754, "learning_rate": 4.736467236467237e-05, "loss": 0.261, "step": 460 },
    { "epoch": 1.504, "grad_norm": 14.417835235595703, "learning_rate": 4.718660968660969e-05, "loss": 0.2596, "step": 470 },
    { "epoch": 1.536, "grad_norm": 26.80813217163086, "learning_rate": 4.700854700854701e-05, "loss": 0.3011, "step": 480 },
    { "epoch": 1.568, "grad_norm": 29.056140899658203, "learning_rate": 4.683048433048433e-05, "loss": 0.2741, "step": 490 },
    { "epoch": 1.6, "grad_norm": 8.897234916687012, "learning_rate": 4.665242165242166e-05, "loss": 0.2906, "step": 500 },
    { "epoch": 1.6320000000000001, "grad_norm": 15.488897323608398, "learning_rate": 4.6474358974358976e-05, "loss": 0.3148, "step": 510 },
    { "epoch": 1.6640000000000001, "grad_norm": 45.335018157958984, "learning_rate": 4.62962962962963e-05, "loss": 0.3026, "step": 520 },
    { "epoch": 1.696, "grad_norm": 8.628056526184082, "learning_rate": 4.611823361823362e-05, "loss": 0.2719, "step": 530 },
    { "epoch": 1.728, "grad_norm": 11.930350303649902, "learning_rate": 4.594017094017094e-05, "loss": 0.2831, "step": 540 },
    { "epoch": 1.76, "grad_norm": 7.246833324432373, "learning_rate": 4.576210826210827e-05, "loss": 0.2292, "step": 550 },
    { "epoch": 1.792, "grad_norm": 31.572784423828125, "learning_rate": 4.558404558404559e-05, "loss": 0.2698, "step": 560 },
    { "epoch": 1.8239999999999998, "grad_norm": 9.255568504333496, "learning_rate": 4.5405982905982906e-05, "loss": 0.2469, "step": 570 },
    { "epoch": 1.8559999999999999, "grad_norm": 13.890963554382324, "learning_rate": 4.522792022792023e-05, "loss": 0.2614, "step": 580 },
    { "epoch": 1.888, "grad_norm": 13.776374816894531, "learning_rate": 4.504985754985755e-05, "loss": 0.264, "step": 590 },
    { "epoch": 1.92, "grad_norm": 7.997173309326172, "learning_rate": 4.4871794871794874e-05, "loss": 0.2548, "step": 600 },
    { "epoch": 1.952, "grad_norm": 44.6745719909668, "learning_rate": 4.46937321937322e-05, "loss": 0.2476, "step": 610 },
    { "epoch": 1.984, "grad_norm": 13.378036499023438, "learning_rate": 4.451566951566952e-05, "loss": 0.2625, "step": 620 },
    { "epoch": 2.0, "eval_accuracy": 0.7690140845070422, "eval_loss": 0.5525702238082886, "eval_runtime": 14.9955, "eval_samples_per_second": 71.021, "eval_steps_per_second": 2.267, "step": 625 },
    { "epoch": 2.016, "grad_norm": 7.805148601531982, "learning_rate": 4.4337606837606836e-05, "loss": 0.2232, "step": 630 },
    { "epoch": 2.048, "grad_norm": 10.30744457244873, "learning_rate": 4.415954415954416e-05, "loss": 0.2577, "step": 640 },
    { "epoch": 2.08, "grad_norm": 15.62030029296875, "learning_rate": 4.3981481481481486e-05, "loss": 0.2273, "step": 650 },
    { "epoch": 2.112, "grad_norm": 28.10418128967285, "learning_rate": 4.3803418803418805e-05, "loss": 0.243, "step": 660 },
    { "epoch": 2.144, "grad_norm": 9.574631690979004, "learning_rate": 4.362535612535612e-05, "loss": 0.2286, "step": 670 },
    { "epoch": 2.176, "grad_norm": 14.983197212219238, "learning_rate": 4.344729344729345e-05, "loss": 0.2173, "step": 680 },
    { "epoch": 2.208, "grad_norm": 19.074600219726562, "learning_rate": 4.326923076923077e-05, "loss": 0.2339, "step": 690 },
    { "epoch": 2.24, "grad_norm": 27.23953628540039, "learning_rate": 4.309116809116809e-05, "loss": 0.2008, "step": 700 },
    { "epoch": 2.2720000000000002, "grad_norm": 13.12559986114502, "learning_rate": 4.291310541310542e-05, "loss": 0.1996, "step": 710 },
    { "epoch": 2.304, "grad_norm": 22.73211097717285, "learning_rate": 4.2735042735042735e-05, "loss": 0.1986, "step": 720 },
    { "epoch": 2.336, "grad_norm": 12.57758903503418, "learning_rate": 4.255698005698006e-05, "loss": 0.2283, "step": 730 },
    { "epoch": 2.368, "grad_norm": 61.22172927856445, "learning_rate": 4.2378917378917385e-05, "loss": 0.2175, "step": 740 },
    { "epoch": 2.4, "grad_norm": 14.797411918640137, "learning_rate": 4.2200854700854704e-05, "loss": 0.1758, "step": 750 },
    { "epoch": 2.432, "grad_norm": 9.504317283630371, "learning_rate": 4.202279202279202e-05, "loss": 0.2087, "step": 760 },
    { "epoch": 2.464, "grad_norm": 8.436298370361328, "learning_rate": 4.184472934472935e-05, "loss": 0.2318, "step": 770 },
    { "epoch": 2.496, "grad_norm": 31.693523406982422, "learning_rate": 4.166666666666667e-05, "loss": 0.1909, "step": 780 },
    { "epoch": 2.528, "grad_norm": 14.50406265258789, "learning_rate": 4.148860398860399e-05, "loss": 0.2157, "step": 790 },
    { "epoch": 2.56, "grad_norm": 25.511077880859375, "learning_rate": 4.131054131054131e-05, "loss": 0.2237, "step": 800 },
    { "epoch": 2.592, "grad_norm": 6.311196804046631, "learning_rate": 4.1132478632478634e-05, "loss": 0.1922, "step": 810 },
    { "epoch": 2.624, "grad_norm": 37.99801254272461, "learning_rate": 4.095441595441596e-05, "loss": 0.1835, "step": 820 },
    { "epoch": 2.656, "grad_norm": 48.04703140258789, "learning_rate": 4.077635327635328e-05, "loss": 0.191, "step": 830 },
    { "epoch": 2.6879999999999997, "grad_norm": 52.908992767333984, "learning_rate": 4.05982905982906e-05, "loss": 0.2908, "step": 840 },
    { "epoch": 2.7199999999999998, "grad_norm": 8.987102508544922, "learning_rate": 4.042022792022792e-05, "loss": 0.216, "step": 850 },
    { "epoch": 2.752, "grad_norm": 44.67879104614258, "learning_rate": 4.024216524216524e-05, "loss": 0.1991, "step": 860 },
    { "epoch": 2.784, "grad_norm": 23.197927474975586, "learning_rate": 4.006410256410257e-05, "loss": 0.1751, "step": 870 },
    { "epoch": 2.816, "grad_norm": 29.43416404724121, "learning_rate": 3.988603988603989e-05, "loss": 0.2064, "step": 880 },
    { "epoch": 2.848, "grad_norm": 10.251945495605469, "learning_rate": 3.970797720797721e-05, "loss": 0.178, "step": 890 },
    { "epoch": 2.88, "grad_norm": 29.398643493652344, "learning_rate": 3.952991452991453e-05, "loss": 0.1813, "step": 900 },
    { "epoch": 2.912, "grad_norm": 37.31789016723633, "learning_rate": 3.935185185185186e-05, "loss": 0.1778, "step": 910 },
    { "epoch": 2.944, "grad_norm": 21.298931121826172, "learning_rate": 3.9173789173789176e-05, "loss": 0.2124, "step": 920 },
    { "epoch": 2.976, "grad_norm": 10.21948528289795, "learning_rate": 3.89957264957265e-05, "loss": 0.1584, "step": 930 },
    { "epoch": 2.9984, "eval_accuracy": 0.8018779342723005, "eval_loss": 0.560497522354126, "eval_runtime": 14.76, "eval_samples_per_second": 72.155, "eval_steps_per_second": 2.304, "step": 937 },
    { "epoch": 3.008, "grad_norm": 9.502830505371094, "learning_rate": 3.881766381766382e-05, "loss": 0.1569, "step": 940 },
    { "epoch": 3.04, "grad_norm": 23.640422821044922, "learning_rate": 3.863960113960114e-05, "loss": 0.2172, "step": 950 },
    { "epoch": 3.072, "grad_norm": 7.1912312507629395, "learning_rate": 3.846153846153846e-05, "loss": 0.1954, "step": 960 },
    { "epoch": 3.104, "grad_norm": 61.15785598754883, "learning_rate": 3.828347578347579e-05, "loss": 0.2364, "step": 970 },
    { "epoch": 3.136, "grad_norm": 11.975686073303223, "learning_rate": 3.8105413105413106e-05, "loss": 0.1856, "step": 980 },
    { "epoch": 3.168, "grad_norm": 13.5460844039917, "learning_rate": 3.7927350427350425e-05, "loss": 0.1608, "step": 990 },
    { "epoch": 3.2, "grad_norm": 13.04128646850586, "learning_rate": 3.774928774928775e-05, "loss": 0.1485, "step": 1000 },
    { "epoch": 3.232, "grad_norm": 17.87329864501953, "learning_rate": 3.7571225071225075e-05, "loss": 0.1782, "step": 1010 },
    { "epoch": 3.2640000000000002, "grad_norm": 21.913515090942383, "learning_rate": 3.739316239316239e-05, "loss": 0.1636, "step": 1020 },
    { "epoch": 3.296, "grad_norm": 53.9290771484375, "learning_rate": 3.721509971509972e-05, "loss": 0.1869, "step": 1030 },
    { "epoch": 3.328, "grad_norm": 12.648904800415039, "learning_rate": 3.7037037037037037e-05, "loss": 0.1374, "step": 1040 },
    { "epoch": 3.36, "grad_norm": 30.469200134277344, "learning_rate": 3.685897435897436e-05, "loss": 0.1677, "step": 1050 },
    { "epoch": 3.392, "grad_norm": 53.974571228027344, "learning_rate": 3.668091168091169e-05, "loss": 0.1297, "step": 1060 },
    { "epoch": 3.424, "grad_norm": 13.038790702819824, "learning_rate": 3.6502849002849005e-05, "loss": 0.1773, "step": 1070 },
    { "epoch": 3.456, "grad_norm": 10.653145790100098, "learning_rate": 3.6324786324786323e-05, "loss": 0.1672, "step": 1080 },
    { "epoch": 3.488, "grad_norm": 19.27906608581543, "learning_rate": 3.614672364672365e-05, "loss": 0.1587, "step": 1090 },
    { "epoch": 3.52, "grad_norm": 9.866512298583984, "learning_rate": 3.5968660968660974e-05, "loss": 0.1566, "step": 1100 },
    { "epoch": 3.552, "grad_norm": 24.01140785217285, "learning_rate": 3.579059829059829e-05, "loss": 0.1546, "step": 1110 },
    { "epoch": 3.584, "grad_norm": 33.78230285644531, "learning_rate": 3.561253561253561e-05, "loss": 0.1769, "step": 1120 },
    { "epoch": 3.616, "grad_norm": 29.797826766967773, "learning_rate": 3.5434472934472935e-05, "loss": 0.1477, "step": 1130 },
    { "epoch": 3.648, "grad_norm": 11.370080947875977, "learning_rate": 3.525641025641026e-05, "loss": 0.1749, "step": 1140 },
    { "epoch": 3.68, "grad_norm": 6.187923908233643, "learning_rate": 3.507834757834758e-05, "loss": 0.1187, "step": 1150 },
    { "epoch": 3.7119999999999997, "grad_norm": 57.243919372558594, "learning_rate": 3.4900284900284904e-05, "loss": 0.1455, "step": 1160 },
    { "epoch": 3.7439999999999998, "grad_norm": 48.05120849609375, "learning_rate": 3.472222222222222e-05, "loss": 0.1792, "step": 1170 },
    { "epoch": 3.776, "grad_norm": 18.508798599243164, "learning_rate": 3.454415954415954e-05, "loss": 0.1977, "step": 1180 },
    { "epoch": 3.808, "grad_norm": 11.206692695617676, "learning_rate": 3.436609686609687e-05, "loss": 0.1752, "step": 1190 },
    { "epoch": 3.84, "grad_norm": 8.796914100646973, "learning_rate": 3.418803418803419e-05, "loss": 0.134, "step": 1200 },
    { "epoch": 3.872, "grad_norm": 28.23493003845215, "learning_rate": 3.400997150997151e-05, "loss": 0.1466, "step": 1210 },
    { "epoch": 3.904, "grad_norm": 24.789331436157227, "learning_rate": 3.3831908831908834e-05, "loss": 0.1674, "step": 1220 },
    { "epoch": 3.936, "grad_norm": 29.97929573059082, "learning_rate": 3.365384615384616e-05, "loss": 0.1626, "step": 1230 },
    { "epoch": 3.968, "grad_norm": 12.732221603393555, "learning_rate": 3.347578347578348e-05, "loss": 0.1162, "step": 1240 },
    { "epoch": 4.0, "grad_norm": 23.768869400024414, "learning_rate": 3.32977207977208e-05, "loss": 0.1382, "step": 1250 },
    { "epoch": 4.0, "eval_accuracy": 0.8497652582159625, "eval_loss": 0.429149866104126, "eval_runtime": 14.7304, "eval_samples_per_second": 72.299, "eval_steps_per_second": 2.308, "step": 1250 },
    { "epoch": 4.032, "grad_norm": 26.3099308013916, "learning_rate": 3.311965811965812e-05, "loss": 0.1279, "step": 1260 },
    { "epoch": 4.064, "grad_norm": 26.10764503479004, "learning_rate": 3.294159544159544e-05, "loss": 0.1868, "step": 1270 },
    { "epoch": 4.096, "grad_norm": 42.908058166503906, "learning_rate": 3.2763532763532764e-05, "loss": 0.1102, "step": 1280 },
    { "epoch": 4.128, "grad_norm": 18.12682342529297, "learning_rate": 3.258547008547009e-05, "loss": 0.1372, "step": 1290 },
    { "epoch": 4.16, "grad_norm": 32.7211799621582, "learning_rate": 3.240740740740741e-05, "loss": 0.1342, "step": 1300 },
    { "epoch": 4.192, "grad_norm": 43.664955139160156, "learning_rate": 3.2229344729344726e-05, "loss": 0.1205, "step": 1310 },
    { "epoch": 4.224, "grad_norm": 21.014827728271484, "learning_rate": 3.205128205128206e-05, "loss": 0.1328, "step": 1320 },
    { "epoch": 4.256, "grad_norm": 9.477145195007324, "learning_rate": 3.1873219373219376e-05, "loss": 0.12, "step": 1330 },
    { "epoch": 4.288, "grad_norm": 11.2942533493042, "learning_rate": 3.1695156695156695e-05, "loss": 0.1033, "step": 1340 },
    { "epoch": 4.32, "grad_norm": 19.64799690246582, "learning_rate": 3.151709401709402e-05, "loss": 0.1157, "step": 1350 },
    { "epoch": 4.352, "grad_norm": 26.556772232055664, "learning_rate": 3.133903133903134e-05, "loss": 0.1053, "step": 1360 },
    { "epoch": 4.384, "grad_norm": 17.614032745361328, "learning_rate": 3.116096866096866e-05, "loss": 0.1266, "step": 1370 },
    { "epoch": 4.416, "grad_norm": 6.941921710968018, "learning_rate": 3.098290598290599e-05, "loss": 0.1177, "step": 1380 },
    { "epoch": 4.448, "grad_norm": 12.218695640563965, "learning_rate": 3.080484330484331e-05, "loss": 0.1209, "step": 1390 },
    { "epoch": 4.48, "grad_norm": 43.25284194946289, "learning_rate": 3.0626780626780625e-05, "loss": 0.1346, "step": 1400 },
    { "epoch": 4.5120000000000005, "grad_norm": 13.644841194152832, "learning_rate": 3.0448717948717947e-05, "loss": 0.1156, "step": 1410 },
    { "epoch": 4.5440000000000005, "grad_norm": 66.71968078613281, "learning_rate": 3.0270655270655275e-05, "loss": 0.1197, "step": 1420 },
    { "epoch": 4.576, "grad_norm": 59.87635040283203, "learning_rate": 3.0092592592592593e-05, "loss": 0.1105, "step": 1430 },
    { "epoch": 4.608, "grad_norm": 10.875226020812988, "learning_rate": 2.9914529914529915e-05, "loss": 0.0932, "step": 1440 },
    { "epoch": 4.64, "grad_norm": 21.233959197998047, "learning_rate": 2.9736467236467237e-05, "loss": 0.0914, "step": 1450 },
    { "epoch": 4.672, "grad_norm": 25.02607536315918, "learning_rate": 2.9558404558404562e-05, "loss": 0.1175, "step": 1460 },
    { "epoch": 4.704, "grad_norm": 9.968689918518066, "learning_rate": 2.9380341880341884e-05, "loss": 0.1111, "step": 1470 },
    { "epoch": 4.736, "grad_norm": 19.770673751831055, "learning_rate": 2.9202279202279202e-05, "loss": 0.1627, "step": 1480 },
    { "epoch": 4.768, "grad_norm": 16.976701736450195, "learning_rate": 2.9024216524216524e-05, "loss": 0.1355, "step": 1490 },
    { "epoch": 4.8, "grad_norm": 32.00942611694336, "learning_rate": 2.8846153846153845e-05, "loss": 0.1339, "step": 1500 },
    { "epoch": 4.832, "grad_norm": 12.106451034545898, "learning_rate": 2.866809116809117e-05, "loss": 0.1192, "step": 1510 },
    { "epoch": 4.864, "grad_norm": 20.953189849853516, "learning_rate": 2.8490028490028492e-05, "loss": 0.1295, "step": 1520 },
    { "epoch": 4.896, "grad_norm": 18.536453247070312, "learning_rate": 2.8311965811965814e-05, "loss": 0.101, "step": 1530 },
    { "epoch": 4.928, "grad_norm": 14.893009185791016, "learning_rate": 2.8133903133903132e-05, "loss": 0.112, "step": 1540 },
    { "epoch": 4.96, "grad_norm": 10.388225555419922, "learning_rate": 2.795584045584046e-05, "loss": 0.1166, "step": 1550 },
    { "epoch": 4.992, "grad_norm": 15.933597564697266, "learning_rate": 2.777777777777778e-05, "loss": 0.1058, "step": 1560 },
    { "epoch": 4.9984, "eval_accuracy": 0.8666666666666667, "eval_loss": 0.39110323786735535, "eval_runtime": 14.8044, "eval_samples_per_second": 71.938, "eval_steps_per_second": 2.297, "step": 1562 },
    { "epoch": 5.024, "grad_norm": 21.4670352935791, "learning_rate": 2.75997150997151e-05, "loss": 0.1265, "step": 1570 },
    { "epoch": 5.056, "grad_norm": 22.757436752319336, "learning_rate": 2.7421652421652423e-05, "loss": 0.1, "step": 1580 },
    { "epoch": 5.088, "grad_norm": 11.63531494140625, "learning_rate": 2.724358974358974e-05, "loss": 0.1181, "step": 1590 },
    { "epoch": 5.12, "grad_norm": 11.879034996032715, "learning_rate": 2.706552706552707e-05, "loss": 0.1001, "step": 1600 },
    { "epoch": 5.152, "grad_norm": 13.50586223602295, "learning_rate": 2.688746438746439e-05, "loss": 0.098, "step": 1610 },
    { "epoch": 5.184, "grad_norm": 6.154415130615234, "learning_rate": 2.670940170940171e-05, "loss": 0.1005, "step": 1620 },
    { "epoch": 5.216, "grad_norm": 8.229863166809082, "learning_rate": 2.653133903133903e-05, "loss": 0.1044, "step": 1630 },
    { "epoch": 5.248, "grad_norm": 18.829721450805664, "learning_rate": 2.6353276353276356e-05, "loss": 0.1118, "step": 1640 },
    { "epoch": 5.28, "grad_norm": 17.462610244750977, "learning_rate": 2.6175213675213678e-05, "loss": 0.0809, "step": 1650 },
    { "epoch": 5.312, "grad_norm": 15.725268363952637, "learning_rate": 2.5997150997151e-05, "loss": 0.109, "step": 1660 },
    { "epoch": 5.344, "grad_norm": 12.376166343688965, "learning_rate": 2.5819088319088318e-05, "loss": 0.0775, "step": 1670 },
    { "epoch": 5.376, "grad_norm": 9.472005844116211, "learning_rate": 2.564102564102564e-05, "loss": 0.0958, "step": 1680 },
    { "epoch": 5.408, "grad_norm": 16.398996353149414, "learning_rate": 2.5462962962962965e-05, "loss": 0.0818, "step": 1690 },
    { "epoch": 5.44, "grad_norm": 15.111603736877441, "learning_rate": 2.5284900284900286e-05, "loss": 0.0945, "step": 1700 },
    { "epoch": 5.4719999999999995, "grad_norm": 13.802181243896484, "learning_rate": 2.5106837606837608e-05, "loss": 0.0694, "step": 1710 },
    { "epoch": 5.504, "grad_norm": 6.6055216789245605, "learning_rate": 2.492877492877493e-05, "loss": 0.0886, "step": 1720 },
    { "epoch": 5.536, "grad_norm": 17.351755142211914, "learning_rate": 2.475071225071225e-05, "loss": 0.0939, "step": 1730 },
    { "epoch": 5.568, "grad_norm": 14.629916191101074, "learning_rate": 2.4572649572649573e-05, "loss": 0.0749, "step": 1740 },
    { "epoch": 5.6, "grad_norm": 13.987975120544434, "learning_rate": 2.4394586894586895e-05, "loss": 0.0696, "step": 1750 },
    { "epoch": 5.632, "grad_norm": 19.911739349365234, "learning_rate": 2.4216524216524217e-05, "loss": 0.0829, "step": 1760 },
    { "epoch": 5.664, "grad_norm": 11.428616523742676, "learning_rate": 2.4038461538461542e-05, "loss": 0.0816, "step": 1770 },
    { "epoch": 5.696, "grad_norm": 24.872535705566406, "learning_rate": 2.386039886039886e-05, "loss": 0.0876, "step": 1780 },
    { "epoch": 5.728, "grad_norm": 6.562127113342285, "learning_rate": 2.3682336182336185e-05, "loss": 0.0869, "step": 1790 },
    { "epoch": 5.76, "grad_norm": 14.26649284362793, "learning_rate": 2.3504273504273504e-05, "loss": 0.1098, "step": 1800 },
    { "epoch": 5.792, "grad_norm": 10.148120880126953, "learning_rate": 2.332621082621083e-05, "loss": 0.0725, "step": 1810 },
    { "epoch": 5.824, "grad_norm": 31.81497573852539, "learning_rate": 2.314814814814815e-05, "loss": 0.0599, "step": 1820 },
    { "epoch": 5.856, "grad_norm": 20.058345794677734, "learning_rate": 2.297008547008547e-05, "loss": 0.0987, "step": 1830 },
    { "epoch": 5.888, "grad_norm": 23.388731002807617, "learning_rate": 2.2792022792022794e-05, "loss": 0.0993, "step": 1840 },
    { "epoch": 5.92, "grad_norm": 12.417990684509277, "learning_rate": 2.2613960113960116e-05, "loss": 0.0997, "step": 1850 },
    { "epoch": 5.952, "grad_norm": 18.029085159301758, "learning_rate": 2.2435897435897437e-05, "loss": 0.0875, "step": 1860 },
    { "epoch": 5.984, "grad_norm": 8.003488540649414, "learning_rate": 2.225783475783476e-05, "loss": 0.0703, "step": 1870 },
    { "epoch": 6.0, "eval_accuracy": 0.8028169014084507, "eval_loss": 0.8593423366546631, "eval_runtime": 26.4195, "eval_samples_per_second": 40.311, "eval_steps_per_second": 1.287, "step": 1875 },
    { "epoch": 6.016, "grad_norm": 13.387948036193848, "learning_rate": 2.207977207977208e-05, "loss": 0.0723, "step": 1880 },
    { "epoch": 6.048, "grad_norm": 3.795748472213745, "learning_rate": 2.1901709401709402e-05, "loss": 0.067, "step": 1890 },
    { "epoch": 6.08, "grad_norm": 15.893839836120605, "learning_rate": 2.1723646723646724e-05, "loss": 0.0718, "step": 1900 },
    { "epoch": 6.112, "grad_norm": 6.81917142868042, "learning_rate": 2.1545584045584046e-05, "loss": 0.0738, "step": 1910 },
    { "epoch": 6.144, "grad_norm": 16.609046936035156, "learning_rate": 2.1367521367521368e-05, "loss": 0.0698, "step": 1920 },
    { "epoch": 6.176, "grad_norm": 21.796428680419922, "learning_rate": 2.1189458689458693e-05, "loss": 0.0681, "step": 1930 },
    { "epoch": 6.208, "grad_norm": 9.367144584655762, "learning_rate": 2.101139601139601e-05, "loss": 0.0776, "step": 1940 },
    { "epoch": 6.24, "grad_norm": 45.68296432495117, "learning_rate": 2.0833333333333336e-05, "loss": 0.075, "step": 1950 },
    { "epoch": 6.272, "grad_norm": 22.519546508789062, "learning_rate": 2.0655270655270654e-05, "loss": 0.1017, "step": 1960 },
    { "epoch": 6.304, "grad_norm": 26.589696884155273, "learning_rate": 2.047720797720798e-05, "loss": 0.0699, "step": 1970 },
    { "epoch": 6.336, "grad_norm": 10.323519706726074, "learning_rate": 2.02991452991453e-05, "loss": 0.0628, "step": 1980 },
    { "epoch": 6.368, "grad_norm": 19.11711311340332, "learning_rate": 2.012108262108262e-05, "loss": 0.0812, "step": 1990 },
    { "epoch": 6.4, "grad_norm": 33.67527770996094, "learning_rate": 1.9943019943019945e-05, "loss": 0.085, "step": 2000 },
    { "epoch": 6.432, "grad_norm": 11.990752220153809, "learning_rate": 1.9764957264957266e-05, "loss": 0.0702, "step": 2010 },
    { "epoch": 6.464, "grad_norm": 30.36664581298828, "learning_rate": 1.9586894586894588e-05, "loss": 0.0628, "step": 2020 },
    { "epoch": 6.496, "grad_norm": 21.322391510009766, "learning_rate": 1.940883190883191e-05, "loss": 0.0719, "step": 2030 },
    { "epoch": 6.5280000000000005, "grad_norm": 29.107898712158203, "learning_rate": 1.923076923076923e-05, "loss": 0.0525, "step": 2040 },
    { "epoch": 6.5600000000000005, "grad_norm": 17.11159324645996, "learning_rate": 1.9052706552706553e-05, "loss": 0.0687, "step": 2050 },
    { "epoch": 6.592, "grad_norm": 12.946314811706543, "learning_rate": 1.8874643874643875e-05, "loss": 0.0867, "step": 2060 },
    { "epoch": 6.624, "grad_norm": 7.321907997131348, "learning_rate": 1.8696581196581197e-05, "loss": 0.0661, "step": 2070 },
    { "epoch": 6.656, "grad_norm": 34.47880172729492, "learning_rate": 1.8518518518518518e-05, "loss": 0.0594, "step": 2080 },
    { "epoch": 6.688, "grad_norm": 10.502516746520996, "learning_rate": 1.8340455840455843e-05, "loss": 0.0545, "step": 2090 },
    { "epoch": 6.72, "grad_norm": 24.073810577392578, "learning_rate": 1.8162393162393162e-05, "loss": 0.0592, "step": 2100 },
    { "epoch": 6.752, "grad_norm": 25.363536834716797, "learning_rate": 1.7984330484330487e-05, "loss": 0.0767, "step": 2110 },
    { "epoch": 6.784, "grad_norm": 15.268250465393066, "learning_rate": 1.7806267806267805e-05, "loss": 0.069, "step": 2120 },
    { "epoch": 6.816, "grad_norm": 8.48227596282959, "learning_rate": 1.762820512820513e-05, "loss": 0.0625, "step": 2130 },
    { "epoch": 6.848, "grad_norm": 10.944808006286621, "learning_rate": 1.7450142450142452e-05, "loss": 0.0729, "step": 2140 },
    { "epoch": 6.88, "grad_norm": 12.689846992492676, "learning_rate": 1.727207977207977e-05, "loss": 0.0634, "step": 2150 },
    { "epoch": 6.912, "grad_norm": 6.600920677185059, "learning_rate": 1.7094017094017095e-05, "loss": 0.0758, "step": 2160 },
    { "epoch": 6.944, "grad_norm": 19.84024429321289, "learning_rate": 1.6915954415954417e-05, "loss": 0.0565, "step": 2170 },
    { "epoch": 6.976, "grad_norm": 8.632323265075684, "learning_rate": 1.673789173789174e-05, "loss": 0.0671, "step": 2180 },
    { "epoch": 6.9984, "eval_accuracy": 0.8892018779342723, "eval_loss": 0.3630879521369934, "eval_runtime": 14.8354, "eval_samples_per_second": 71.788, "eval_steps_per_second": 2.292, "step": 2187 },
    { "epoch": 7.008, "grad_norm": 7.642409324645996, "learning_rate": 1.655982905982906e-05, "loss": 0.0489, "step": 2190 },
    { "epoch": 7.04, "grad_norm": 8.677549362182617, "learning_rate": 1.6381766381766382e-05, "loss": 0.0596, "step": 2200 },
    { "epoch": 7.072, "grad_norm": 20.24688148498535, "learning_rate": 1.6203703703703704e-05, "loss": 0.0546, "step": 2210 },
    { "epoch": 7.104, "grad_norm": 19.509143829345703, "learning_rate": 1.602564102564103e-05, "loss": 0.0581, "step": 2220 },
    { "epoch": 7.136, "grad_norm": 26.461017608642578, "learning_rate": 1.5847578347578347e-05, "loss": 0.0857, "step": 2230 },
    { "epoch": 7.168, "grad_norm": 14.442258834838867, "learning_rate": 1.566951566951567e-05, "loss": 0.0688, "step": 2240 },
    { "epoch": 7.2, "grad_norm": 8.91716194152832, "learning_rate": 1.5491452991452994e-05, "loss": 0.079, "step": 2250 },
    { "epoch": 7.232, "grad_norm": 15.394043922424316, "learning_rate": 1.5313390313390312e-05, "loss": 0.0561, "step": 2260 },
    { "epoch": 7.264, "grad_norm": 19.662343978881836, "learning_rate": 1.5135327635327638e-05, "loss": 0.0581, "step": 2270 },
    { "epoch": 7.296, "grad_norm": 8.76469898223877, "learning_rate": 1.4957264957264958e-05, "loss": 0.0439, "step": 2280 },
    { "epoch": 7.328, "grad_norm": 10.02730941772461, "learning_rate": 1.4779202279202281e-05, "loss": 0.0547, "step": 2290 },
    { "epoch": 7.36, "grad_norm": 21.142120361328125, "learning_rate": 1.4601139601139601e-05, "loss": 0.0618, "step": 2300 },
    { "epoch": 7.392, "grad_norm": 8.849529266357422, "learning_rate": 1.4423076923076923e-05, "loss": 0.0601, "step": 2310 },
    { "epoch": 7.424, "grad_norm": 13.313969612121582, "learning_rate": 1.4245014245014246e-05, "loss": 0.0578, "step": 2320 },
    { "epoch": 7.456, "grad_norm": 17.729047775268555, "learning_rate": 1.4066951566951566e-05, "loss": 0.0593, "step": 2330 },
    { "epoch": 7.4879999999999995, "grad_norm": 13.839546203613281, "learning_rate": 1.388888888888889e-05, "loss": 0.0566, "step": 2340 },
    { "epoch": 7.52, "grad_norm": 6.299365997314453, "learning_rate": 1.3710826210826211e-05, "loss": 0.0469, "step": 2350 },
    { "epoch": 7.552, "grad_norm": 12.264620780944824, "learning_rate": 1.3532763532763535e-05, "loss": 0.0476, "step": 2360 },
    { "epoch": 7.584, "grad_norm": 13.175020217895508, "learning_rate": 1.3354700854700855e-05, "loss": 0.0483, "step": 2370 },
    { "epoch": 7.616, "grad_norm": 8.695143699645996, "learning_rate": 1.3176638176638178e-05, "loss": 0.0676, "step": 2380 },
    { "epoch": 7.648, "grad_norm": 7.434255599975586, "learning_rate": 1.29985754985755e-05, "loss": 0.0514, "step": 2390 },
    { "epoch": 7.68, "grad_norm": 11.564408302307129, "learning_rate": 1.282051282051282e-05, "loss": 0.0496, "step": 2400 },
    { "epoch": 7.712, "grad_norm": 15.317635536193848, "learning_rate": 1.2642450142450143e-05, "loss": 0.0723, "step": 2410 },
    { "epoch": 7.744, "grad_norm": 5.177554130554199, "learning_rate": 1.2464387464387465e-05, "loss": 0.0508, "step": 2420 },
    { "epoch": 7.776, "grad_norm": 21.86553382873535, "learning_rate": 1.2286324786324787e-05, "loss": 0.0531, "step": 2430 },
    { "epoch": 7.808, "grad_norm": 5.066462516784668, "learning_rate": 1.2108262108262108e-05, "loss": 0.0601, "step": 2440 },
    { "epoch": 7.84, "grad_norm": 3.860797643661499, "learning_rate": 1.193019943019943e-05, "loss": 0.0529, "step": 2450 },
    { "epoch": 7.872, "grad_norm": 27.842437744140625, "learning_rate": 1.1752136752136752e-05, "loss": 0.0736, "step": 2460 },
    { "epoch": 7.904, "grad_norm": 26.736413955688477, "learning_rate": 1.1574074074074075e-05, "loss": 0.0589, "step": 2470 },
    { "epoch": 7.936, "grad_norm": 25.008304595947266, "learning_rate": 1.1396011396011397e-05, "loss": 0.0549, "step": 2480 },
    { "epoch": 7.968, "grad_norm": 4.6437506675720215, "learning_rate": 1.1217948717948719e-05, "loss": 0.0533, "step": 2490 },
    { "epoch": 8.0, "grad_norm": 26.05324935913086, "learning_rate": 1.103988603988604e-05, "loss": 0.0457, "step": 2500 },
    { "epoch": 8.0, "eval_accuracy": 0.8338028169014085, "eval_loss": 0.7270519137382507, "eval_runtime": 16.0951, "eval_samples_per_second": 66.169, "eval_steps_per_second": 2.112, "step": 2500 },
    { "epoch": 8.032, "grad_norm": 6.876763820648193, "learning_rate": 1.0861823361823362e-05, "loss": 0.0392, "step": 2510 },
    { "epoch": 8.064, "grad_norm": 9.069485664367676, "learning_rate": 1.0683760683760684e-05, "loss": 0.059, "step": 2520 },
    { "epoch": 8.096, "grad_norm": 14.368523597717285, "learning_rate": 1.0505698005698005e-05, "loss": 0.0455, "step": 2530 },
    { "epoch": 8.128, "grad_norm": 10.847954750061035, "learning_rate": 1.0327635327635327e-05, "loss": 0.0368, "step": 2540 },
    { "epoch": 8.16, "grad_norm": 7.805965900421143, "learning_rate": 1.014957264957265e-05, "loss": 0.0374, "step": 2550 },
    { "epoch": 8.192, "grad_norm": 37.152957916259766, "learning_rate": 9.971509971509972e-06, "loss": 0.0434, "step": 2560 },
    { "epoch": 8.224, "grad_norm": 32.26515197753906, "learning_rate": 9.793447293447294e-06, "loss": 0.0538, "step": 2570 },
    { "epoch": 8.256, "grad_norm": 9.871603012084961, "learning_rate": 9.615384615384616e-06, "loss": 0.0443, "step": 2580 },
    { "epoch": 8.288, "grad_norm": 18.939363479614258, "learning_rate": 9.437321937321937e-06, "loss": 0.0444, "step": 2590 },
    { "epoch": 8.32, "grad_norm": 8.318560600280762, "learning_rate": 9.259259259259259e-06, "loss": 0.0367, "step": 2600 },
    { "epoch": 8.352, "grad_norm": 10.871614456176758, "learning_rate": 9.081196581196581e-06, "loss": 0.0493, "step": 2610 },
    { "epoch": 8.384, "grad_norm": 5.46032190322876, "learning_rate": 8.903133903133903e-06, "loss": 0.0413, "step": 2620 },
    { "epoch": 8.416, "grad_norm": 1.5709623098373413, "learning_rate": 8.725071225071226e-06, "loss": 0.0658, "step": 2630 },
    { "epoch": 8.448, "grad_norm": 26.417600631713867, "learning_rate": 8.547008547008548e-06, "loss": 0.0681, "step": 2640 },
    { "epoch": 8.48, "grad_norm": 9.628387451171875, "learning_rate": 8.36894586894587e-06, "loss": 0.0399, "step": 2650 },
    { "epoch": 8.512, "grad_norm": 2.8144702911376953, "learning_rate": 8.190883190883191e-06, "loss": 0.0435, "step": 2660 },
    { "epoch": 8.544, "grad_norm": 13.873322486877441, "learning_rate": 8.012820512820515e-06, "loss": 0.0501, "step": 2670 },
    { "epoch": 8.576, "grad_norm": 4.96248722076416, "learning_rate": 7.834757834757835e-06, "loss": 0.0423, "step": 2680 },
    { "epoch": 8.608, "grad_norm": 12.236059188842773, "learning_rate": 7.656695156695156e-06, "loss": 0.0488, "step": 2690 },
    { "epoch": 8.64, "grad_norm": 6.332564353942871, "learning_rate": 7.478632478632479e-06, "loss": 0.0437, "step": 2700 },
    { "epoch": 8.672, "grad_norm": 7.448380470275879, "learning_rate": 7.3005698005698005e-06, "loss": 0.035, "step": 2710 },
    { "epoch": 8.704, "grad_norm": 7.228295803070068, "learning_rate": 7.122507122507123e-06, "loss": 0.0457, "step": 2720 },
    { "epoch": 8.736, "grad_norm": 5.306349277496338, "learning_rate": 6.944444444444445e-06, "loss": 0.0496, "step": 2730 },
    { "epoch": 8.768, "grad_norm": 15.959365844726562, "learning_rate": 6.766381766381767e-06, "loss": 0.04, "step": 2740 },
    { "epoch": 8.8, "grad_norm": 13.594867706298828, "learning_rate": 6.588319088319089e-06, "loss": 0.0466, "step": 2750 },
    { "epoch": 8.832, "grad_norm": 7.143582820892334, "learning_rate": 6.41025641025641e-06, "loss": 0.0367, "step": 2760 },
    { "epoch": 8.864, "grad_norm": 10.158649444580078, "learning_rate": 6.2321937321937325e-06, "loss": 0.0442, "step": 2770 },
    { "epoch": 8.896, "grad_norm": 4.369603633880615, "learning_rate": 6.054131054131054e-06, "loss": 0.0352, "step": 2780 },
    { "epoch": 8.928, "grad_norm": 11.626177787780762, "learning_rate": 5.876068376068376e-06, "loss": 0.0414, "step": 2790 },
    { "epoch": 8.96, "grad_norm": 8.576985359191895, "learning_rate": 5.6980056980056985e-06, "loss": 0.0445, "step": 2800 },
    { "epoch": 8.992, "grad_norm": 11.782782554626465, "learning_rate": 5.51994301994302e-06, "loss": 0.0396, "step": 2810 },
    { "epoch": 8.9984, "eval_accuracy": 0.8826291079812206, "eval_loss": 0.46553394198417664, "eval_runtime": 16.3312, "eval_samples_per_second": 65.213, "eval_steps_per_second": 2.082, "step": 2812 },
    { "epoch": 9.024, "grad_norm": 9.517723083496094, "learning_rate": 5.341880341880342e-06, "loss": 0.0281, "step": 2820 },
    { "epoch": 9.056, "grad_norm": 11.273005485534668, "learning_rate": 5.163817663817664e-06, "loss": 0.0395, "step": 2830 },
    { "epoch": 9.088, "grad_norm": 12.864770889282227, "learning_rate": 4.985754985754986e-06, "loss": 0.0399, "step": 2840 },
    { "epoch": 9.12, "grad_norm": 11.933670997619629, "learning_rate": 4.807692307692308e-06, "loss": 0.0381, "step": 2850 },
    { "epoch": 9.152, "grad_norm": 21.274028778076172, "learning_rate": 4.6296296296296296e-06, "loss": 0.03, "step": 2860 },
    { "epoch": 9.184, "grad_norm": 6.597903251647949, "learning_rate": 4.451566951566951e-06, "loss": 0.042, "step": 2870 },
    { "epoch": 9.216, "grad_norm": 1.0557407140731812, "learning_rate": 4.273504273504274e-06, "loss": 0.0268, "step": 2880 },
    { "epoch": 9.248, "grad_norm": 11.865984916687012, "learning_rate": 4.0954415954415956e-06, "loss": 0.0301, "step": 2890 },
    { "epoch": 9.28, "grad_norm": 25.20126724243164, "learning_rate": 3.917378917378917e-06, "loss": 0.0383, "step": 2900 },
    { "epoch": 9.312, "grad_norm": 11.134941101074219, "learning_rate": 3.7393162393162394e-06, "loss": 0.0375, "step": 2910 },
    { "epoch": 9.344, "grad_norm": 2.754523992538452, "learning_rate": 3.5612535612535615e-06, "loss": 0.043, "step": 2920 },
    { "epoch": 9.376, "grad_norm": 12.286087989807129, "learning_rate": 3.3831908831908837e-06, "loss": 0.035, "step": 2930 },
    { "epoch": 9.408, "grad_norm": 3.8736507892608643, "learning_rate": 3.205128205128205e-06, "loss": 0.0391, "step": 2940 },
    { "epoch": 9.44, "grad_norm": 22.06711769104004, "learning_rate": 3.027065527065527e-06, "loss": 0.0377, "step": 2950 },
    { "epoch": 9.472, "grad_norm": 8.435704231262207, "learning_rate": 2.8490028490028492e-06, "loss": 0.0301, "step": 2960 },
    { "epoch": 9.504, "grad_norm": 27.129562377929688, "learning_rate": 2.670940170940171e-06, "loss": 0.039, "step": 2970 },
    { "epoch": 9.536, "grad_norm": 4.00566291809082, "learning_rate": 2.492877492877493e-06, "loss": 0.0261, "step": 2980 },
    { "epoch": 9.568, "grad_norm": 18.572261810302734, "learning_rate": 2.3148148148148148e-06, "loss": 0.036, "step": 2990 },
    { "epoch": 9.6, "grad_norm": 6.907130241394043, "learning_rate": 2.136752136752137e-06, "loss": 0.0455, "step": 3000 },
    { "epoch": 9.632, "grad_norm": 3.8767590522766113, "learning_rate": 1.9586894586894586e-06, "loss": 0.0456, "step": 3010 },
    { "epoch": 9.664, "grad_norm": 12.119085311889648, "learning_rate": 1.7806267806267808e-06, "loss": 0.037, "step": 3020 },
    { "epoch": 9.696, "grad_norm": 7.650444507598877, "learning_rate": 1.6025641025641025e-06, "loss": 0.0363, "step": 3030 },
    { "epoch": 9.728, "grad_norm": 8.467825889587402, "learning_rate": 1.4245014245014246e-06, "loss": 0.0198, "step": 3040 },
    { "epoch": 9.76, "grad_norm": 19.763225555419922, "learning_rate": 1.2464387464387465e-06, "loss": 0.0408, "step": 3050 },
    { "epoch": 9.792, "grad_norm": 25.160770416259766, "learning_rate": 1.0683760683760685e-06, "loss": 0.0392, "step": 3060 },
    { "epoch": 9.824, "grad_norm": 7.482017517089844, "learning_rate": 8.903133903133904e-07, "loss": 0.0283, "step": 3070 },
    { "epoch": 9.856, "grad_norm": 7.559390544891357, "learning_rate": 7.122507122507123e-07, "loss": 0.0463, "step": 3080 },
    { "epoch": 9.888, "grad_norm": 10.798991203308105, "learning_rate": 5.341880341880342e-07, "loss": 0.0298, "step": 3090 },
    { "epoch": 9.92, "grad_norm": 3.9386239051818848, "learning_rate": 3.5612535612535615e-07, "loss": 0.0386, "step": 3100 },
    { "epoch": 9.952, "grad_norm": 15.766891479492188, "learning_rate": 1.7806267806267808e-07, "loss": 0.0277, "step": 3110 },
    { "epoch": 9.984, "grad_norm": 3.3344645500183105, "learning_rate": 0.0, "loss": 0.0293, "step": 3120 },
    { "epoch": 9.984, "eval_accuracy": 0.8610328638497653, "eval_loss": 0.7580540180206299, "eval_runtime": 16.5093, "eval_samples_per_second": 64.509, "eval_steps_per_second": 2.059, "step": 3120 },
    { "epoch": 9.984, "step": 3120, "total_flos": 9.926487761391452e+18, "train_loss": 0.15716634974934351, "train_runtime": 142469.0271, "train_samples_per_second": 2.808, "train_steps_per_second": 0.022 }
  ],
  "logging_steps": 10,
  "max_steps": 3120,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.926487761391452e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}