{
  "best_metric": 0.9315417408943176,
  "best_model_checkpoint": "./albert_sentiment_model\\checkpoint-2000",
  "epoch": 2.3094688221709005,
  "eval_steps": 500,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007698229407236336,
      "grad_norm": 25.626720428466797,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.9667,
      "step": 10
    },
    {
      "epoch": 0.015396458814472672,
      "grad_norm": 35.37693405151367,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.0464,
      "step": 20
    },
    {
      "epoch": 0.023094688221709007,
      "grad_norm": 22.576478958129883,
      "learning_rate": 3e-06,
      "loss": 1.9565,
      "step": 30
    },
    {
      "epoch": 0.030792917628945343,
      "grad_norm": 21.228620529174805,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.9974,
      "step": 40
    },
    {
      "epoch": 0.03849114703618168,
      "grad_norm": 27.480052947998047,
      "learning_rate": 5e-06,
      "loss": 1.9431,
      "step": 50
    },
    {
      "epoch": 0.046189376443418015,
      "grad_norm": 27.75535774230957,
      "learning_rate": 6e-06,
      "loss": 1.8815,
      "step": 60
    },
    {
      "epoch": 0.05388760585065435,
      "grad_norm": 17.684391021728516,
      "learning_rate": 7.000000000000001e-06,
      "loss": 1.8753,
      "step": 70
    },
    {
      "epoch": 0.061585835257890686,
      "grad_norm": 21.62338638305664,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.8629,
      "step": 80
    },
    {
      "epoch": 0.06928406466512702,
      "grad_norm": 43.68171691894531,
      "learning_rate": 9e-06,
      "loss": 1.8003,
      "step": 90
    },
    {
      "epoch": 0.07698229407236336,
      "grad_norm": 22.630041122436523,
      "learning_rate": 1e-05,
      "loss": 1.7636,
      "step": 100
    },
    {
      "epoch": 0.08468052347959969,
      "grad_norm": 30.946426391601562,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.7074,
      "step": 110
    },
    {
      "epoch": 0.09237875288683603,
      "grad_norm": 30.420377731323242,
      "learning_rate": 1.2e-05,
      "loss": 1.6716,
      "step": 120
    },
    {
      "epoch": 0.10007698229407236,
      "grad_norm": 48.35504150390625,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.4465,
      "step": 130
    },
    {
      "epoch": 0.1077752117013087,
      "grad_norm": 51.120361328125,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.4337,
      "step": 140
    },
    {
      "epoch": 0.11547344110854503,
      "grad_norm": 29.794086456298828,
      "learning_rate": 1.5e-05,
      "loss": 1.3535,
      "step": 150
    },
    {
      "epoch": 0.12317167051578137,
      "grad_norm": 34.01353454589844,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.3233,
      "step": 160
    },
    {
      "epoch": 0.1308698999230177,
      "grad_norm": 47.1700439453125,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 1.342,
      "step": 170
    },
    {
      "epoch": 0.13856812933025403,
      "grad_norm": 34.01679611206055,
      "learning_rate": 1.8e-05,
      "loss": 1.4129,
      "step": 180
    },
    {
      "epoch": 0.1462663587374904,
      "grad_norm": 27.47703742980957,
      "learning_rate": 1.9e-05,
      "loss": 1.3769,
      "step": 190
    },
    {
      "epoch": 0.15396458814472672,
      "grad_norm": 60.44585037231445,
      "learning_rate": 2e-05,
      "loss": 1.3133,
      "step": 200
    },
    {
      "epoch": 0.16166281755196305,
      "grad_norm": 23.065670013427734,
      "learning_rate": 2.1e-05,
      "loss": 1.2592,
      "step": 210
    },
    {
      "epoch": 0.16936104695919937,
      "grad_norm": 70.744873046875,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.3765,
      "step": 220
    },
    {
      "epoch": 0.17705927636643573,
      "grad_norm": 47.14573287963867,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 1.3629,
      "step": 230
    },
    {
      "epoch": 0.18475750577367206,
      "grad_norm": 26.078250885009766,
      "learning_rate": 2.4e-05,
      "loss": 1.1865,
      "step": 240
    },
    {
      "epoch": 0.1924557351809084,
      "grad_norm": 63.99885940551758,
      "learning_rate": 2.5e-05,
      "loss": 1.3399,
      "step": 250
    },
    {
      "epoch": 0.20015396458814472,
      "grad_norm": 24.421600341796875,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.1854,
      "step": 260
    },
    {
      "epoch": 0.20785219399538107,
      "grad_norm": 16.544498443603516,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.2511,
      "step": 270
    },
    {
      "epoch": 0.2155504234026174,
      "grad_norm": 52.77935791015625,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.2485,
      "step": 280
    },
    {
      "epoch": 0.22324865280985373,
      "grad_norm": 68.4757080078125,
      "learning_rate": 2.9e-05,
      "loss": 1.0838,
      "step": 290
    },
    {
      "epoch": 0.23094688221709006,
      "grad_norm": 26.108631134033203,
      "learning_rate": 3e-05,
      "loss": 1.3167,
      "step": 300
    },
    {
      "epoch": 0.23864511162432642,
      "grad_norm": 16.388147354125977,
      "learning_rate": 3.1e-05,
      "loss": 1.0942,
      "step": 310
    },
    {
      "epoch": 0.24634334103156275,
      "grad_norm": 27.521499633789062,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.3823,
      "step": 320
    },
    {
      "epoch": 0.2540415704387991,
      "grad_norm": 18.420778274536133,
      "learning_rate": 3.3e-05,
      "loss": 1.1258,
      "step": 330
    },
    {
      "epoch": 0.2617397998460354,
      "grad_norm": 16.579042434692383,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.159,
      "step": 340
    },
    {
      "epoch": 0.26943802925327176,
      "grad_norm": 48.20399856567383,
      "learning_rate": 3.5e-05,
      "loss": 1.1556,
      "step": 350
    },
    {
      "epoch": 0.27713625866050806,
      "grad_norm": 12.147098541259766,
      "learning_rate": 3.6e-05,
      "loss": 1.1309,
      "step": 360
    },
    {
      "epoch": 0.2848344880677444,
      "grad_norm": 52.27555847167969,
      "learning_rate": 3.7e-05,
      "loss": 1.3251,
      "step": 370
    },
    {
      "epoch": 0.2925327174749808,
      "grad_norm": 42.3845100402832,
      "learning_rate": 3.8e-05,
      "loss": 1.3005,
      "step": 380
    },
    {
      "epoch": 0.3002309468822171,
      "grad_norm": 29.475187301635742,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 1.2515,
      "step": 390
    },
    {
      "epoch": 0.30792917628945343,
      "grad_norm": 60.33949279785156,
      "learning_rate": 4e-05,
      "loss": 1.2975,
      "step": 400
    },
    {
      "epoch": 0.3156274056966898,
      "grad_norm": 47.650211334228516,
      "learning_rate": 4.1e-05,
      "loss": 1.2688,
      "step": 410
    },
    {
      "epoch": 0.3233256351039261,
      "grad_norm": 20.859683990478516,
      "learning_rate": 4.2e-05,
      "loss": 1.2035,
      "step": 420
    },
    {
      "epoch": 0.33102386451116245,
      "grad_norm": 16.41029930114746,
      "learning_rate": 4.3e-05,
      "loss": 1.1203,
      "step": 430
    },
    {
      "epoch": 0.33872209391839875,
      "grad_norm": 12.632010459899902,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.1197,
      "step": 440
    },
    {
      "epoch": 0.3464203233256351,
      "grad_norm": 36.34510803222656,
      "learning_rate": 4.5e-05,
      "loss": 1.2378,
      "step": 450
    },
    {
      "epoch": 0.35411855273287146,
      "grad_norm": 52.647926330566406,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.2022,
      "step": 460
    },
    {
      "epoch": 0.36181678214010776,
      "grad_norm": 88.51087188720703,
      "learning_rate": 4.7e-05,
      "loss": 1.0755,
      "step": 470
    },
    {
      "epoch": 0.3695150115473441,
      "grad_norm": 27.021543502807617,
      "learning_rate": 4.8e-05,
      "loss": 1.2264,
      "step": 480
    },
    {
      "epoch": 0.3772132409545804,
      "grad_norm": 13.925777435302734,
      "learning_rate": 4.9e-05,
      "loss": 1.0287,
      "step": 490
    },
    {
      "epoch": 0.3849114703618168,
      "grad_norm": 17.583614349365234,
      "learning_rate": 5e-05,
      "loss": 0.979,
      "step": 500
    },
    {
      "epoch": 0.3849114703618168,
      "eval_accuracy": 0.5254041570438799,
      "eval_f1": 0.4643762968149376,
      "eval_loss": 1.1208206415176392,
      "eval_precision": 0.47090986837941123,
      "eval_recall": 0.5254041570438799,
      "eval_runtime": 7.9109,
      "eval_samples_per_second": 328.406,
      "eval_steps_per_second": 41.082,
      "step": 500
    },
    {
      "epoch": 0.39260969976905313,
      "grad_norm": 29.161548614501953,
      "learning_rate": 4.998700285937094e-05,
      "loss": 1.1457,
      "step": 510
    },
    {
      "epoch": 0.40030792917628943,
      "grad_norm": 45.45001220703125,
      "learning_rate": 4.997400571874188e-05,
      "loss": 1.4028,
      "step": 520
    },
    {
      "epoch": 0.4080061585835258,
      "grad_norm": 20.393016815185547,
      "learning_rate": 4.9961008578112814e-05,
      "loss": 1.3193,
      "step": 530
    },
    {
      "epoch": 0.41570438799076215,
      "grad_norm": 9.973402976989746,
      "learning_rate": 4.9948011437483756e-05,
      "loss": 1.088,
      "step": 540
    },
    {
      "epoch": 0.42340261739799845,
      "grad_norm": 41.007110595703125,
      "learning_rate": 4.993501429685469e-05,
      "loss": 1.3676,
      "step": 550
    },
    {
      "epoch": 0.4311008468052348,
      "grad_norm": 13.304949760437012,
      "learning_rate": 4.992201715622563e-05,
      "loss": 1.3313,
      "step": 560
    },
    {
      "epoch": 0.4387990762124711,
      "grad_norm": 10.015753746032715,
      "learning_rate": 4.990902001559657e-05,
      "loss": 1.0298,
      "step": 570
    },
    {
      "epoch": 0.44649730561970746,
      "grad_norm": 67.66326904296875,
      "learning_rate": 4.989602287496751e-05,
      "loss": 1.0524,
      "step": 580
    },
    {
      "epoch": 0.4541955350269438,
      "grad_norm": 28.423460006713867,
      "learning_rate": 4.9883025734338444e-05,
      "loss": 1.1959,
      "step": 590
    },
    {
      "epoch": 0.4618937644341801,
      "grad_norm": 18.55489158630371,
      "learning_rate": 4.9870028593709386e-05,
      "loss": 1.0586,
      "step": 600
    },
    {
      "epoch": 0.4695919938414165,
      "grad_norm": 36.054500579833984,
      "learning_rate": 4.985703145308033e-05,
      "loss": 1.2009,
      "step": 610
    },
    {
      "epoch": 0.47729022324865283,
      "grad_norm": 18.236879348754883,
      "learning_rate": 4.984403431245126e-05,
      "loss": 1.1012,
      "step": 620
    },
    {
      "epoch": 0.48498845265588914,
      "grad_norm": 20.71109962463379,
      "learning_rate": 4.9831037171822204e-05,
      "loss": 1.1486,
      "step": 630
    },
    {
      "epoch": 0.4926866820631255,
      "grad_norm": 28.097261428833008,
      "learning_rate": 4.981804003119314e-05,
      "loss": 1.1645,
      "step": 640
    },
    {
      "epoch": 0.5003849114703618,
      "grad_norm": 20.31618309020996,
      "learning_rate": 4.980504289056408e-05,
      "loss": 1.1256,
      "step": 650
    },
    {
      "epoch": 0.5080831408775982,
      "grad_norm": 126.53520202636719,
      "learning_rate": 4.9792045749935016e-05,
      "loss": 1.4823,
      "step": 660
    },
    {
      "epoch": 0.5157813702848345,
      "grad_norm": 14.448310852050781,
      "learning_rate": 4.977904860930596e-05,
      "loss": 1.2872,
      "step": 670
    },
    {
      "epoch": 0.5234795996920708,
      "grad_norm": 16.949066162109375,
      "learning_rate": 4.976605146867689e-05,
      "loss": 1.2012,
      "step": 680
    },
    {
      "epoch": 0.5311778290993071,
      "grad_norm": 14.372076034545898,
      "learning_rate": 4.9753054328047835e-05,
      "loss": 1.1928,
      "step": 690
    },
    {
      "epoch": 0.5388760585065435,
      "grad_norm": 76.28020477294922,
      "learning_rate": 4.974005718741877e-05,
      "loss": 1.1082,
      "step": 700
    },
    {
      "epoch": 0.5465742879137798,
      "grad_norm": 41.16048049926758,
      "learning_rate": 4.972706004678971e-05,
      "loss": 1.0006,
      "step": 710
    },
    {
      "epoch": 0.5542725173210161,
      "grad_norm": 64.6050033569336,
      "learning_rate": 4.9714062906160646e-05,
      "loss": 1.2638,
      "step": 720
    },
    {
      "epoch": 0.5619707467282525,
      "grad_norm": 35.458457946777344,
      "learning_rate": 4.970106576553159e-05,
      "loss": 1.0841,
      "step": 730
    },
    {
      "epoch": 0.5696689761354888,
      "grad_norm": 33.32878494262695,
      "learning_rate": 4.968806862490252e-05,
      "loss": 1.2451,
      "step": 740
    },
    {
      "epoch": 0.5773672055427251,
      "grad_norm": 26.29853630065918,
      "learning_rate": 4.967507148427346e-05,
      "loss": 1.0487,
      "step": 750
    },
    {
      "epoch": 0.5850654349499615,
      "grad_norm": 6.7931084632873535,
      "learning_rate": 4.96620743436444e-05,
      "loss": 1.0836,
      "step": 760
    },
    {
      "epoch": 0.5927636643571979,
      "grad_norm": 11.772964477539062,
      "learning_rate": 4.9649077203015335e-05,
      "loss": 0.9459,
      "step": 770
    },
    {
      "epoch": 0.6004618937644342,
      "grad_norm": 83.65714263916016,
      "learning_rate": 4.9636080062386276e-05,
      "loss": 1.0921,
      "step": 780
    },
    {
      "epoch": 0.6081601231716706,
      "grad_norm": 67.42829132080078,
      "learning_rate": 4.962308292175721e-05,
      "loss": 1.0981,
      "step": 790
    },
    {
      "epoch": 0.6158583525789069,
      "grad_norm": 40.03876876831055,
      "learning_rate": 4.961008578112815e-05,
      "loss": 1.1155,
      "step": 800
    },
    {
      "epoch": 0.6235565819861432,
      "grad_norm": 14.459259033203125,
      "learning_rate": 4.9597088640499095e-05,
      "loss": 1.2369,
      "step": 810
    },
    {
      "epoch": 0.6312548113933796,
      "grad_norm": 43.21743392944336,
      "learning_rate": 4.9584091499870037e-05,
      "loss": 1.1154,
      "step": 820
    },
    {
      "epoch": 0.6389530408006159,
      "grad_norm": 29.028535842895508,
      "learning_rate": 4.957109435924097e-05,
      "loss": 1.0684,
      "step": 830
    },
    {
      "epoch": 0.6466512702078522,
      "grad_norm": 3421.910400390625,
      "learning_rate": 4.9558097218611907e-05,
      "loss": 1.1415,
      "step": 840
    },
    {
      "epoch": 0.6543494996150885,
      "grad_norm": 50.08725357055664,
      "learning_rate": 4.954510007798285e-05,
      "loss": 1.5334,
      "step": 850
    },
    {
      "epoch": 0.6620477290223249,
      "grad_norm": 31.24684715270996,
      "learning_rate": 4.953210293735378e-05,
      "loss": 1.1275,
      "step": 860
    },
    {
      "epoch": 0.6697459584295612,
      "grad_norm": 56.685611724853516,
      "learning_rate": 4.9519105796724725e-05,
      "loss": 1.1105,
      "step": 870
    },
    {
      "epoch": 0.6774441878367975,
      "grad_norm": 59.59196853637695,
      "learning_rate": 4.950610865609566e-05,
      "loss": 1.296,
      "step": 880
    },
    {
      "epoch": 0.6851424172440339,
      "grad_norm": 59.091697692871094,
      "learning_rate": 4.94931115154666e-05,
      "loss": 1.0462,
      "step": 890
    },
    {
      "epoch": 0.6928406466512702,
      "grad_norm": 25.288618087768555,
      "learning_rate": 4.948011437483754e-05,
      "loss": 0.9684,
      "step": 900
    },
    {
      "epoch": 0.7005388760585065,
      "grad_norm": 34.281044006347656,
      "learning_rate": 4.946711723420848e-05,
      "loss": 1.0789,
      "step": 910
    },
    {
      "epoch": 0.7082371054657429,
      "grad_norm": 12.016758918762207,
      "learning_rate": 4.945412009357941e-05,
      "loss": 1.1008,
      "step": 920
    },
    {
      "epoch": 0.7159353348729792,
      "grad_norm": 23.659439086914062,
      "learning_rate": 4.9441122952950355e-05,
      "loss": 1.3602,
      "step": 930
    },
    {
      "epoch": 0.7236335642802155,
      "grad_norm": 12.893980026245117,
      "learning_rate": 4.942812581232129e-05,
      "loss": 1.3067,
      "step": 940
    },
    {
      "epoch": 0.7313317936874519,
      "grad_norm": 36.1422004699707,
      "learning_rate": 4.941512867169223e-05,
      "loss": 1.2795,
      "step": 950
    },
    {
      "epoch": 0.7390300230946882,
      "grad_norm": 24.229007720947266,
      "learning_rate": 4.940213153106317e-05,
      "loss": 0.8846,
      "step": 960
    },
    {
      "epoch": 0.7467282525019245,
      "grad_norm": 12.185916900634766,
      "learning_rate": 4.938913439043411e-05,
      "loss": 1.0618,
      "step": 970
    },
    {
      "epoch": 0.7544264819091608,
      "grad_norm": 58.11173629760742,
      "learning_rate": 4.9376137249805044e-05,
      "loss": 1.0567,
      "step": 980
    },
    {
      "epoch": 0.7621247113163973,
      "grad_norm": 17.494789123535156,
      "learning_rate": 4.936314010917598e-05,
      "loss": 0.9427,
      "step": 990
    },
    {
      "epoch": 0.7698229407236336,
      "grad_norm": 21.812406539916992,
      "learning_rate": 4.935014296854692e-05,
      "loss": 1.0871,
      "step": 1000
    },
    {
      "epoch": 0.7698229407236336,
      "eval_accuracy": 0.5400307929176289,
      "eval_f1": 0.5212757553318326,
      "eval_loss": 1.0452667474746704,
      "eval_precision": 0.5391802513141176,
      "eval_recall": 0.5400307929176289,
      "eval_runtime": 7.9557,
      "eval_samples_per_second": 326.559,
      "eval_steps_per_second": 40.851,
      "step": 1000
    },
    {
      "epoch": 0.7775211701308699,
      "grad_norm": 68.46436309814453,
      "learning_rate": 4.9337145827917855e-05,
      "loss": 1.3711,
      "step": 1010
    },
    {
      "epoch": 0.7852193995381063,
      "grad_norm": 25.05400848388672,
      "learning_rate": 4.9324148687288804e-05,
      "loss": 1.1931,
      "step": 1020
    },
    {
      "epoch": 0.7929176289453426,
      "grad_norm": 24.60456085205078,
      "learning_rate": 4.931115154665974e-05,
      "loss": 1.1109,
      "step": 1030
    },
    {
      "epoch": 0.8006158583525789,
      "grad_norm": 24.26364517211914,
      "learning_rate": 4.929815440603068e-05,
      "loss": 1.0804,
      "step": 1040
    },
    {
      "epoch": 0.8083140877598153,
      "grad_norm": 22.994844436645508,
      "learning_rate": 4.9285157265401615e-05,
      "loss": 0.8045,
      "step": 1050
    },
    {
      "epoch": 0.8160123171670516,
      "grad_norm": 16.76064682006836,
      "learning_rate": 4.927216012477256e-05,
      "loss": 1.1683,
      "step": 1060
    },
    {
      "epoch": 0.8237105465742879,
      "grad_norm": 46.755393981933594,
      "learning_rate": 4.925916298414349e-05,
      "loss": 1.1455,
      "step": 1070
    },
    {
      "epoch": 0.8314087759815243,
      "grad_norm": 46.636077880859375,
      "learning_rate": 4.924616584351443e-05,
      "loss": 0.9044,
      "step": 1080
    },
    {
      "epoch": 0.8391070053887606,
      "grad_norm": 25.199445724487305,
      "learning_rate": 4.923316870288537e-05,
      "loss": 1.0345,
      "step": 1090
    },
    {
      "epoch": 0.8468052347959969,
      "grad_norm": 37.11675262451172,
      "learning_rate": 4.9220171562256304e-05,
      "loss": 1.2491,
      "step": 1100
    },
    {
      "epoch": 0.8545034642032333,
      "grad_norm": 19.960737228393555,
      "learning_rate": 4.9207174421627246e-05,
      "loss": 0.9029,
      "step": 1110
    },
    {
      "epoch": 0.8622016936104696,
      "grad_norm": 81.35137939453125,
      "learning_rate": 4.919417728099818e-05,
      "loss": 1.2947,
      "step": 1120
    },
    {
      "epoch": 0.8698999230177059,
      "grad_norm": 32.889427185058594,
      "learning_rate": 4.918118014036912e-05,
      "loss": 1.1014,
      "step": 1130
    },
    {
      "epoch": 0.8775981524249422,
      "grad_norm": 35.65376281738281,
      "learning_rate": 4.916818299974006e-05,
      "loss": 1.1406,
      "step": 1140
    },
    {
      "epoch": 0.8852963818321786,
      "grad_norm": 309.11114501953125,
      "learning_rate": 4.9155185859111e-05,
      "loss": 0.9684,
      "step": 1150
    },
    {
      "epoch": 0.8929946112394149,
      "grad_norm": 19.624103546142578,
      "learning_rate": 4.9142188718481934e-05,
      "loss": 0.9999,
      "step": 1160
    },
    {
      "epoch": 0.9006928406466512,
      "grad_norm": 35.4536018371582,
      "learning_rate": 4.9129191577852876e-05,
      "loss": 1.3579,
      "step": 1170
    },
    {
      "epoch": 0.9083910700538876,
      "grad_norm": 94.22552490234375,
      "learning_rate": 4.911619443722381e-05,
      "loss": 1.0926,
      "step": 1180
    },
    {
      "epoch": 0.9160892994611239,
      "grad_norm": 23.519554138183594,
      "learning_rate": 4.910319729659475e-05,
      "loss": 1.1782,
      "step": 1190
    },
    {
      "epoch": 0.9237875288683602,
      "grad_norm": 25.848222732543945,
      "learning_rate": 4.909020015596569e-05,
      "loss": 1.0085,
      "step": 1200
    },
    {
      "epoch": 0.9314857582755967,
      "grad_norm": 40.45154571533203,
      "learning_rate": 4.907720301533663e-05,
      "loss": 1.0384,
      "step": 1210
    },
    {
      "epoch": 0.939183987682833,
      "grad_norm": 26.536237716674805,
      "learning_rate": 4.9064205874707564e-05,
      "loss": 1.006,
      "step": 1220
    },
    {
      "epoch": 0.9468822170900693,
      "grad_norm": 17.983915328979492,
      "learning_rate": 4.9051208734078506e-05,
      "loss": 1.1031,
      "step": 1230
    },
    {
      "epoch": 0.9545804464973057,
      "grad_norm": 46.20643615722656,
      "learning_rate": 4.903821159344945e-05,
      "loss": 0.9322,
      "step": 1240
    },
    {
      "epoch": 0.962278675904542,
      "grad_norm": 59.26303482055664,
      "learning_rate": 4.902521445282038e-05,
      "loss": 1.1146,
      "step": 1250
    },
    {
      "epoch": 0.9699769053117783,
      "grad_norm": 42.50138473510742,
      "learning_rate": 4.9012217312191324e-05,
      "loss": 1.1769,
      "step": 1260
    },
    {
      "epoch": 0.9776751347190146,
      "grad_norm": 44.46087646484375,
      "learning_rate": 4.899922017156226e-05,
      "loss": 1.113,
      "step": 1270
    },
    {
      "epoch": 0.985373364126251,
      "grad_norm": 24.701412200927734,
      "learning_rate": 4.89862230309332e-05,
      "loss": 1.0445,
      "step": 1280
    },
    {
      "epoch": 0.9930715935334873,
      "grad_norm": 19.296676635742188,
      "learning_rate": 4.8973225890304136e-05,
      "loss": 1.075,
      "step": 1290
    },
    {
      "epoch": 1.0007698229407236,
      "grad_norm": 24.70547866821289,
      "learning_rate": 4.896022874967508e-05,
      "loss": 0.8709,
      "step": 1300
    },
    {
      "epoch": 1.0084680523479599,
      "grad_norm": 35.57961654663086,
      "learning_rate": 4.894723160904601e-05,
      "loss": 1.0378,
      "step": 1310
    },
    {
      "epoch": 1.0161662817551964,
      "grad_norm": 30.505125045776367,
      "learning_rate": 4.893423446841695e-05,
      "loss": 0.992,
      "step": 1320
    },
    {
      "epoch": 1.0238645111624327,
      "grad_norm": 58.703643798828125,
      "learning_rate": 4.892123732778789e-05,
      "loss": 1.1885,
      "step": 1330
    },
    {
      "epoch": 1.031562740569669,
      "grad_norm": 28.3648681640625,
      "learning_rate": 4.8908240187158824e-05,
      "loss": 1.2337,
      "step": 1340
    },
    {
      "epoch": 1.0392609699769053,
      "grad_norm": 24.309017181396484,
      "learning_rate": 4.8895243046529766e-05,
      "loss": 1.0578,
      "step": 1350
    },
    {
      "epoch": 1.0469591993841416,
      "grad_norm": 26.426084518432617,
      "learning_rate": 4.88822459059007e-05,
      "loss": 0.9974,
      "step": 1360
    },
    {
      "epoch": 1.054657428791378,
      "grad_norm": 27.277679443359375,
      "learning_rate": 4.886924876527164e-05,
      "loss": 1.0032,
      "step": 1370
    },
    {
      "epoch": 1.0623556581986142,
      "grad_norm": 22.838211059570312,
      "learning_rate": 4.885625162464258e-05,
      "loss": 1.1148,
      "step": 1380
    },
    {
      "epoch": 1.0700538876058507,
      "grad_norm": 41.72176742553711,
      "learning_rate": 4.884325448401352e-05,
      "loss": 0.9433,
      "step": 1390
    },
    {
      "epoch": 1.077752117013087,
      "grad_norm": 72.71926879882812,
      "learning_rate": 4.8830257343384455e-05,
      "loss": 0.8138,
      "step": 1400
    },
    {
      "epoch": 1.0854503464203233,
      "grad_norm": 20.042091369628906,
      "learning_rate": 4.8817260202755396e-05,
      "loss": 1.0195,
      "step": 1410
    },
    {
      "epoch": 1.0931485758275596,
      "grad_norm": 20.96778678894043,
      "learning_rate": 4.880426306212633e-05,
      "loss": 1.0269,
      "step": 1420
    },
    {
      "epoch": 1.100846805234796,
      "grad_norm": 32.662322998046875,
      "learning_rate": 4.879126592149727e-05,
      "loss": 0.891,
      "step": 1430
    },
    {
      "epoch": 1.1085450346420322,
      "grad_norm": 19.693790435791016,
      "learning_rate": 4.8778268780868215e-05,
      "loss": 1.054,
      "step": 1440
    },
    {
      "epoch": 1.1162432640492688,
      "grad_norm": 20.91014289855957,
      "learning_rate": 4.876527164023915e-05,
      "loss": 0.9135,
      "step": 1450
    },
    {
      "epoch": 1.123941493456505,
      "grad_norm": 52.75801086425781,
      "learning_rate": 4.875227449961009e-05,
      "loss": 1.0464,
      "step": 1460
    },
    {
      "epoch": 1.1316397228637414,
      "grad_norm": 42.71292495727539,
      "learning_rate": 4.8739277358981026e-05,
      "loss": 1.0075,
      "step": 1470
    },
    {
      "epoch": 1.1393379522709777,
      "grad_norm": 21.153303146362305,
      "learning_rate": 4.872628021835197e-05,
      "loss": 1.2111,
      "step": 1480
    },
    {
      "epoch": 1.147036181678214,
      "grad_norm": 44.54867172241211,
      "learning_rate": 4.87132830777229e-05,
      "loss": 0.8313,
      "step": 1490
    },
    {
      "epoch": 1.1547344110854503,
      "grad_norm": 48.92774963378906,
      "learning_rate": 4.8700285937093845e-05,
      "loss": 1.0748,
      "step": 1500
    },
    {
      "epoch": 1.1547344110854503,
      "eval_accuracy": 0.5658198614318707,
      "eval_f1": 0.5450550292077301,
      "eval_loss": 0.9736452102661133,
      "eval_precision": 0.5647189867888521,
      "eval_recall": 0.5658198614318707,
      "eval_runtime": 8.0094,
      "eval_samples_per_second": 324.367,
      "eval_steps_per_second": 40.577,
      "step": 1500
    },
    {
      "epoch": 1.1624326404926868,
      "grad_norm": 39.90788650512695,
      "learning_rate": 4.868728879646478e-05,
      "loss": 1.0338,
      "step": 1510
    },
    {
      "epoch": 1.170130869899923,
      "grad_norm": 35.67189025878906,
      "learning_rate": 4.867429165583572e-05,
      "loss": 1.0185,
      "step": 1520
    },
    {
      "epoch": 1.1778290993071594,
      "grad_norm": 76.6269302368164,
      "learning_rate": 4.8661294515206657e-05,
      "loss": 1.0013,
      "step": 1530
    },
    {
      "epoch": 1.1855273287143957,
      "grad_norm": 166.01284790039062,
      "learning_rate": 4.86482973745776e-05,
      "loss": 1.0846,
      "step": 1540
    },
    {
      "epoch": 1.193225558121632,
      "grad_norm": 26.522552490234375,
      "learning_rate": 4.863530023394853e-05,
      "loss": 1.012,
      "step": 1550
    },
    {
      "epoch": 1.2009237875288683,
      "grad_norm": 23.47018051147461,
      "learning_rate": 4.862230309331947e-05,
      "loss": 0.9853,
      "step": 1560
    },
    {
      "epoch": 1.2086220169361046,
      "grad_norm": 16.074838638305664,
      "learning_rate": 4.860930595269041e-05,
      "loss": 0.9196,
      "step": 1570
    },
    {
      "epoch": 1.2163202463433411,
      "grad_norm": 21.358341217041016,
      "learning_rate": 4.8596308812061345e-05,
      "loss": 1.0509,
      "step": 1580
    },
    {
      "epoch": 1.2240184757505774,
      "grad_norm": 28.035846710205078,
      "learning_rate": 4.858331167143229e-05,
      "loss": 0.9299,
      "step": 1590
    },
    {
      "epoch": 1.2317167051578137,
      "grad_norm": 23.313568115234375,
      "learning_rate": 4.857031453080322e-05,
      "loss": 0.9581,
      "step": 1600
    },
    {
      "epoch": 1.23941493456505,
      "grad_norm": 26.382869720458984,
      "learning_rate": 4.8557317390174163e-05,
      "loss": 0.8374,
      "step": 1610
    },
    {
      "epoch": 1.2471131639722863,
      "grad_norm": 62.66410446166992,
      "learning_rate": 4.85443202495451e-05,
      "loss": 1.1268,
      "step": 1620
    },
    {
      "epoch": 1.2548113933795226,
      "grad_norm": 32.7457160949707,
      "learning_rate": 4.853132310891604e-05,
      "loss": 0.7796,
      "step": 1630
    },
    {
      "epoch": 1.2625096227867592,
      "grad_norm": 45.75529861450195,
      "learning_rate": 4.851832596828698e-05,
      "loss": 1.0265,
      "step": 1640
    },
    {
      "epoch": 1.2702078521939955,
      "grad_norm": 73.32510375976562,
      "learning_rate": 4.850532882765792e-05,
      "loss": 1.3302,
      "step": 1650
    },
    {
      "epoch": 1.2779060816012318,
      "grad_norm": 51.92557144165039,
      "learning_rate": 4.849233168702886e-05,
      "loss": 1.2056,
      "step": 1660
    },
    {
      "epoch": 1.285604311008468,
      "grad_norm": 9.508386611938477,
      "learning_rate": 4.8479334546399794e-05,
      "loss": 0.8817,
      "step": 1670
    },
    {
      "epoch": 1.2933025404157044,
      "grad_norm": 19.91646957397461,
      "learning_rate": 4.8466337405770735e-05,
      "loss": 1.1081,
      "step": 1680
    },
    {
      "epoch": 1.3010007698229407,
      "grad_norm": 22.991626739501953,
      "learning_rate": 4.845334026514167e-05,
      "loss": 0.9953,
      "step": 1690
    },
    {
      "epoch": 1.308698999230177,
      "grad_norm": 36.05852127075195,
      "learning_rate": 4.844034312451261e-05,
      "loss": 0.9753,
      "step": 1700
    },
    {
      "epoch": 1.3163972286374133,
      "grad_norm": 30.5816593170166,
      "learning_rate": 4.842734598388355e-05,
      "loss": 1.0043,
      "step": 1710
    },
    {
      "epoch": 1.3240954580446498,
      "grad_norm": 32.22935485839844,
      "learning_rate": 4.841434884325449e-05,
      "loss": 0.977,
      "step": 1720
    },
    {
      "epoch": 1.331793687451886,
      "grad_norm": 93.55306243896484,
      "learning_rate": 4.8401351702625424e-05,
      "loss": 0.959,
      "step": 1730
    },
    {
      "epoch": 1.3394919168591224,
      "grad_norm": 20.172109603881836,
      "learning_rate": 4.8388354561996365e-05,
      "loss": 0.8522,
      "step": 1740
    },
    {
      "epoch": 1.3471901462663587,
      "grad_norm": 34.432254791259766,
      "learning_rate": 4.83753574213673e-05,
      "loss": 0.9968,
      "step": 1750
    },
    {
      "epoch": 1.354888375673595,
      "grad_norm": 9.962430000305176,
      "learning_rate": 4.836236028073824e-05,
      "loss": 0.8846,
      "step": 1760
    },
    {
      "epoch": 1.3625866050808315,
      "grad_norm": 16.258909225463867,
      "learning_rate": 4.834936314010918e-05,
      "loss": 0.9685,
      "step": 1770
    },
    {
      "epoch": 1.3702848344880678,
      "grad_norm": 600.8819580078125,
      "learning_rate": 4.833636599948012e-05,
      "loss": 1.2891,
      "step": 1780
    },
    {
      "epoch": 1.3779830638953041,
      "grad_norm": 19.667699813842773,
      "learning_rate": 4.8323368858851054e-05,
      "loss": 0.9624,
      "step": 1790
    },
    {
      "epoch": 1.3856812933025404,
      "grad_norm": 28.33806037902832,
      "learning_rate": 4.831037171822199e-05,
      "loss": 0.9677,
      "step": 1800
    },
    {
      "epoch": 1.3933795227097767,
      "grad_norm": 17.19879722595215,
      "learning_rate": 4.829737457759293e-05,
      "loss": 0.7864,
      "step": 1810
    },
    {
      "epoch": 1.401077752117013,
      "grad_norm": 68.86786651611328,
      "learning_rate": 4.8284377436963865e-05,
      "loss": 1.0756,
      "step": 1820
    },
    {
      "epoch": 1.4087759815242493,
      "grad_norm": 19.253087997436523,
      "learning_rate": 4.827138029633481e-05,
      "loss": 0.902,
      "step": 1830
    },
    {
      "epoch": 1.4164742109314856,
      "grad_norm": 22.85369300842285,
      "learning_rate": 4.825838315570575e-05,
      "loss": 0.9561,
      "step": 1840
    },
    {
      "epoch": 1.4241724403387221,
      "grad_norm": 34.35043716430664,
      "learning_rate": 4.824538601507669e-05,
      "loss": 0.938,
      "step": 1850
    },
    {
      "epoch": 1.4318706697459584,
      "grad_norm": 29.028274536132812,
      "learning_rate": 4.8232388874447626e-05,
      "loss": 0.9659,
      "step": 1860
    },
    {
      "epoch": 1.4395688991531947,
      "grad_norm": 15.733872413635254,
      "learning_rate": 4.821939173381857e-05,
      "loss": 0.955,
      "step": 1870
    },
    {
      "epoch": 1.447267128560431,
      "grad_norm": 45.1998176574707,
      "learning_rate": 4.82063945931895e-05,
      "loss": 1.0997,
      "step": 1880
    },
    {
      "epoch": 1.4549653579676676,
      "grad_norm": 62.95772171020508,
      "learning_rate": 4.819339745256044e-05,
      "loss": 0.9975,
      "step": 1890
    },
    {
      "epoch": 1.4626635873749039,
      "grad_norm": 13.520893096923828,
      "learning_rate": 4.818040031193138e-05,
      "loss": 0.9224,
      "step": 1900
    },
    {
      "epoch": 1.4703618167821402,
      "grad_norm": 44.204837799072266,
      "learning_rate": 4.8167403171302314e-05,
      "loss": 1.0823,
      "step": 1910
    },
    {
      "epoch": 1.4780600461893765,
      "grad_norm": 18.848365783691406,
      "learning_rate": 4.8154406030673256e-05,
      "loss": 0.8767,
      "step": 1920
    },
    {
      "epoch": 1.4857582755966128,
      "grad_norm": 48.03689193725586,
      "learning_rate": 4.814140889004419e-05,
      "loss": 1.1274,
      "step": 1930
    },
    {
      "epoch": 1.493456505003849,
      "grad_norm": 29.64519691467285,
      "learning_rate": 4.812841174941513e-05,
      "loss": 0.8109,
      "step": 1940
    },
    {
      "epoch": 1.5011547344110854,
      "grad_norm": 56.1988639831543,
      "learning_rate": 4.811541460878607e-05,
      "loss": 1.1295,
      "step": 1950
    },
    {
      "epoch": 1.5088529638183217,
      "grad_norm": 39.60947036743164,
      "learning_rate": 4.810241746815701e-05,
      "loss": 1.1057,
      "step": 1960
    },
    {
      "epoch": 1.516551193225558,
      "grad_norm": 14.454174041748047,
      "learning_rate": 4.8089420327527944e-05,
      "loss": 1.098,
      "step": 1970
    },
    {
      "epoch": 1.5242494226327945,
      "grad_norm": 18.127788543701172,
      "learning_rate": 4.8076423186898886e-05,
      "loss": 0.947,
      "step": 1980
    },
    {
      "epoch": 1.5319476520400308,
      "grad_norm": 30.042423248291016,
      "learning_rate": 4.806342604626982e-05,
      "loss": 0.8846,
      "step": 1990
    },
    {
      "epoch": 1.539645881447267,
      "grad_norm": 31.532861709594727,
      "learning_rate": 4.805042890564076e-05,
      "loss": 0.908,
      "step": 2000
    },
    {
      "epoch": 1.539645881447267,
      "eval_accuracy": 0.5873749037721324,
      "eval_f1": 0.5698601671652778,
      "eval_loss": 0.9315417408943176,
      "eval_precision": 0.5808478011880224,
      "eval_recall": 0.5873749037721324,
      "eval_runtime": 8.0056,
      "eval_samples_per_second": 324.524,
      "eval_steps_per_second": 40.597,
      "step": 2000
    },
    {
      "epoch": 1.5473441108545036,
      "grad_norm": 87.31982421875,
      "learning_rate": 4.80374317650117e-05,
      "loss": 1.2135,
      "step": 2010
    },
    {
      "epoch": 1.55504234026174,
      "grad_norm": 36.29372787475586,
      "learning_rate": 4.802443462438263e-05,
      "loss": 0.9446,
      "step": 2020
    },
    {
      "epoch": 1.5627405696689762,
      "grad_norm": 65.0682144165039,
      "learning_rate": 4.8011437483753574e-05,
      "loss": 0.942,
      "step": 2030
    },
    {
      "epoch": 1.5704387990762125,
      "grad_norm": 7.867269992828369,
      "learning_rate": 4.799844034312451e-05,
      "loss": 0.9343,
      "step": 2040
    },
    {
      "epoch": 1.5781370284834488,
      "grad_norm": 18.922204971313477,
      "learning_rate": 4.798544320249546e-05,
      "loss": 1.0242,
      "step": 2050
    },
    {
      "epoch": 1.5858352578906851,
      "grad_norm": 22.182636260986328,
      "learning_rate": 4.797244606186639e-05,
      "loss": 0.8997,
      "step": 2060
    },
    {
      "epoch": 1.5935334872979214,
      "grad_norm": 13.871538162231445,
      "learning_rate": 4.7959448921237335e-05,
      "loss": 1.0238,
      "step": 2070
    },
    {
      "epoch": 1.6012317167051577,
      "grad_norm": 24.956518173217773,
      "learning_rate": 4.794645178060827e-05,
      "loss": 0.8555,
      "step": 2080
    },
    {
      "epoch": 1.608929946112394,
      "grad_norm": 27.833717346191406,
      "learning_rate": 4.793345463997921e-05,
      "loss": 1.1057,
      "step": 2090
    },
    {
      "epoch": 1.6166281755196303,
      "grad_norm": 15.595669746398926,
      "learning_rate": 4.7920457499350146e-05,
      "loss": 0.8761,
      "step": 2100
    },
    {
      "epoch": 1.6243264049268669,
      "grad_norm": 33.80525207519531,
      "learning_rate": 4.790746035872109e-05,
      "loss": 0.9889,
      "step": 2110
    },
    {
      "epoch": 1.6320246343341032,
      "grad_norm": 21.781145095825195,
      "learning_rate": 4.789446321809202e-05,
      "loss": 1.055,
      "step": 2120
    },
    {
      "epoch": 1.6397228637413395,
      "grad_norm": 29.12350082397461,
      "learning_rate": 4.788146607746296e-05,
      "loss": 1.0033,
      "step": 2130
    },
    {
      "epoch": 1.647421093148576,
      "grad_norm": 20.39841079711914,
      "learning_rate": 4.78684689368339e-05,
      "loss": 1.0698,
      "step": 2140
    },
    {
      "epoch": 1.6551193225558123,
      "grad_norm": 9.309553146362305,
      "learning_rate": 4.7855471796204835e-05,
      "loss": 0.9092,
      "step": 2150
    },
    {
      "epoch": 1.6628175519630486,
      "grad_norm": 28.19939613342285,
      "learning_rate": 4.7842474655575776e-05,
      "loss": 0.9727,
      "step": 2160
    },
    {
      "epoch": 1.670515781370285,
      "grad_norm": 18.627004623413086,
      "learning_rate": 4.782947751494671e-05,
      "loss": 0.8224,
      "step": 2170
    },
    {
      "epoch": 1.6782140107775212,
      "grad_norm": 15.454500198364258,
      "learning_rate": 4.781648037431765e-05,
      "loss": 0.9459,
      "step": 2180
    },
    {
      "epoch": 1.6859122401847575,
      "grad_norm": 25.361587524414062,
      "learning_rate": 4.780348323368859e-05,
      "loss": 0.8389,
      "step": 2190
    },
    {
      "epoch": 1.6936104695919938,
      "grad_norm": 48.25446701049805,
      "learning_rate": 4.779048609305953e-05,
      "loss": 1.1498,
      "step": 2200
    },
    {
      "epoch": 1.70130869899923,
      "grad_norm": 45.576908111572266,
      "learning_rate": 4.7777488952430465e-05,
      "loss": 0.7871,
      "step": 2210
    },
    {
      "epoch": 1.7090069284064664,
      "grad_norm": 28.02678871154785,
      "learning_rate": 4.7764491811801407e-05,
      "loss": 0.8891,
      "step": 2220
    },
    {
      "epoch": 1.7167051578137027,
      "grad_norm": 17.72388458251953,
      "learning_rate": 4.775149467117234e-05,
      "loss": 1.0627,
      "step": 2230
    },
    {
      "epoch": 1.7244033872209392,
      "grad_norm": 14.796937942504883,
      "learning_rate": 4.773849753054328e-05,
      "loss": 0.8403,
      "step": 2240
    },
    {
      "epoch": 1.7321016166281755,
      "grad_norm": 18.26534652709961,
      "learning_rate": 4.7725500389914225e-05,
      "loss": 1.0399,
      "step": 2250
    },
    {
      "epoch": 1.7397998460354118,
      "grad_norm": 12.606345176696777,
      "learning_rate": 4.771250324928516e-05,
      "loss": 0.7362,
      "step": 2260
    },
    {
      "epoch": 1.7474980754426483,
      "grad_norm": 10.439709663391113,
      "learning_rate": 4.76995061086561e-05,
      "loss": 1.0418,
      "step": 2270
    },
    {
      "epoch": 1.7551963048498846,
      "grad_norm": 24.162647247314453,
      "learning_rate": 4.768650896802704e-05,
      "loss": 0.9489,
      "step": 2280
    },
    {
      "epoch": 1.762894534257121,
      "grad_norm": 34.678993225097656,
      "learning_rate": 4.767351182739798e-05,
      "loss": 0.8907,
      "step": 2290
    },
    {
      "epoch": 1.7705927636643572,
      "grad_norm": 22.55309295654297,
      "learning_rate": 4.7660514686768913e-05,
      "loss": 0.8239,
      "step": 2300
    },
    {
      "epoch": 1.7782909930715936,
      "grad_norm": 22.526445388793945,
      "learning_rate": 4.7647517546139855e-05,
      "loss": 1.0999,
      "step": 2310
    },
    {
      "epoch": 1.7859892224788299,
      "grad_norm": 19.45115089416504,
      "learning_rate": 4.763452040551079e-05,
      "loss": 0.8858,
      "step": 2320
    },
    {
      "epoch": 1.7936874518860662,
      "grad_norm": 29.1611270904541,
      "learning_rate": 4.762152326488173e-05,
      "loss": 1.0418,
      "step": 2330
    },
    {
      "epoch": 1.8013856812933025,
      "grad_norm": 22.248546600341797,
      "learning_rate": 4.760852612425267e-05,
      "loss": 0.8722,
      "step": 2340
    },
    {
      "epoch": 1.8090839107005388,
      "grad_norm": 78.22930908203125,
      "learning_rate": 4.75955289836236e-05,
      "loss": 0.8606,
      "step": 2350
    },
    {
      "epoch": 1.816782140107775,
      "grad_norm": 16.559967041015625,
      "learning_rate": 4.7582531842994544e-05,
      "loss": 0.9108,
      "step": 2360
    },
    {
      "epoch": 1.8244803695150116,
      "grad_norm": 20.415922164916992,
      "learning_rate": 4.756953470236548e-05,
      "loss": 1.0512,
      "step": 2370
    },
    {
      "epoch": 1.8321785989222479,
      "grad_norm": 34.354740142822266,
      "learning_rate": 4.755653756173642e-05,
      "loss": 0.9482,
      "step": 2380
    },
    {
      "epoch": 1.8398768283294842,
      "grad_norm": 26.14980697631836,
      "learning_rate": 4.7543540421107355e-05,
      "loss": 0.9213,
      "step": 2390
    },
    {
      "epoch": 1.8475750577367207,
      "grad_norm": 137.22198486328125,
      "learning_rate": 4.75305432804783e-05,
      "loss": 1.0509,
      "step": 2400
    },
    {
      "epoch": 1.855273287143957,
      "grad_norm": 82.37857818603516,
      "learning_rate": 4.751754613984923e-05,
      "loss": 0.8466,
      "step": 2410
    },
    {
      "epoch": 1.8629715165511933,
      "grad_norm": 31.272960662841797,
      "learning_rate": 4.7504548999220174e-05,
      "loss": 0.9717,
      "step": 2420
    },
    {
      "epoch": 1.8706697459584296,
      "grad_norm": 33.75693893432617,
      "learning_rate": 4.749155185859111e-05,
      "loss": 1.0565,
      "step": 2430
    },
    {
      "epoch": 1.878367975365666,
      "grad_norm": 21.18593406677246,
      "learning_rate": 4.747855471796205e-05,
      "loss": 0.8595,
      "step": 2440
    },
    {
      "epoch": 1.8860662047729022,
      "grad_norm": 23.71518898010254,
      "learning_rate": 4.7465557577332985e-05,
      "loss": 0.8621,
      "step": 2450
    },
    {
      "epoch": 1.8937644341801385,
      "grad_norm": 51.083431243896484,
      "learning_rate": 4.745256043670393e-05,
      "loss": 0.9675,
      "step": 2460
    },
    {
      "epoch": 1.9014626635873748,
      "grad_norm": 59.243743896484375,
      "learning_rate": 4.743956329607487e-05,
      "loss": 0.7884,
      "step": 2470
    },
    {
      "epoch": 1.9091608929946111,
      "grad_norm": 25.169973373413086,
      "learning_rate": 4.7426566155445804e-05,
      "loss": 1.0393,
      "step": 2480
    },
    {
      "epoch": 1.9168591224018474,
      "grad_norm": 19.93684196472168,
      "learning_rate": 4.7413569014816746e-05,
      "loss": 1.0029,
      "step": 2490
    },
    {
      "epoch": 1.924557351809084,
      "grad_norm": 14.145678520202637,
      "learning_rate": 4.740057187418768e-05,
      "loss": 1.0533,
      "step": 2500
    },
    {
      "epoch": 1.924557351809084,
      "eval_accuracy": 0.5569668976135489,
      "eval_f1": 0.5318009748153606,
      "eval_loss": 0.9681846499443054,
      "eval_precision": 0.5749243005518604,
      "eval_recall": 0.5569668976135489,
      "eval_runtime": 8.0276,
      "eval_samples_per_second": 323.634,
      "eval_steps_per_second": 40.485,
      "step": 2500
    },
    {
      "epoch": 1.9322555812163202,
      "grad_norm": 15.758938789367676,
      "learning_rate": 4.738757473355862e-05,
      "loss": 0.8101,
      "step": 2510
    },
    {
      "epoch": 1.9399538106235565,
      "grad_norm": 13.67740535736084,
      "learning_rate": 4.737457759292956e-05,
      "loss": 0.7461,
      "step": 2520
    },
    {
      "epoch": 1.947652040030793,
      "grad_norm": 15.8422269821167,
      "learning_rate": 4.73615804523005e-05,
      "loss": 1.0919,
      "step": 2530
    },
    {
      "epoch": 1.9553502694380294,
      "grad_norm": 150.54856872558594,
      "learning_rate": 4.7348583311671434e-05,
      "loss": 0.8516,
      "step": 2540
    },
    {
      "epoch": 1.9630484988452657,
      "grad_norm": 73.50398254394531,
      "learning_rate": 4.7335586171042376e-05,
      "loss": 0.9784,
      "step": 2550
    },
    {
      "epoch": 1.970746728252502,
      "grad_norm": 16.76059913635254,
      "learning_rate": 4.732258903041331e-05,
      "loss": 0.8167,
      "step": 2560
    },
    {
      "epoch": 1.9784449576597383,
      "grad_norm": 29.237688064575195,
      "learning_rate": 4.730959188978425e-05,
      "loss": 1.0536,
      "step": 2570
    },
    {
      "epoch": 1.9861431870669746,
      "grad_norm": 21.804590225219727,
      "learning_rate": 4.729659474915519e-05,
      "loss": 0.9558,
      "step": 2580
    },
    {
      "epoch": 1.9938414164742109,
      "grad_norm": 21.874589920043945,
      "learning_rate": 4.728359760852612e-05,
      "loss": 0.9748,
      "step": 2590
    },
    {
      "epoch": 2.001539645881447,
      "grad_norm": 32.83283996582031,
      "learning_rate": 4.7270600467897064e-05,
      "loss": 1.0102,
      "step": 2600
    },
    {
      "epoch": 2.0092378752886835,
      "grad_norm": 16.153419494628906,
      "learning_rate": 4.7257603327268e-05,
      "loss": 0.8345,
      "step": 2610
    },
    {
      "epoch": 2.0169361046959198,
      "grad_norm": 54.94171142578125,
      "learning_rate": 4.724460618663894e-05,
      "loss": 0.8617,
      "step": 2620
    },
    {
      "epoch": 2.024634334103156,
      "grad_norm": 39.80078887939453,
      "learning_rate": 4.7231609046009876e-05,
      "loss": 0.8463,
      "step": 2630
    },
    {
      "epoch": 2.032332563510393,
      "grad_norm": 21.685455322265625,
      "learning_rate": 4.721861190538082e-05,
      "loss": 0.8706,
      "step": 2640
    },
    {
      "epoch": 2.040030792917629,
      "grad_norm": 26.26354217529297,
      "learning_rate": 4.720561476475175e-05,
      "loss": 0.8641,
      "step": 2650
    },
    {
      "epoch": 2.0477290223248654,
      "grad_norm": 139.40414428710938,
      "learning_rate": 4.7192617624122694e-05,
      "loss": 0.7537,
      "step": 2660
    },
    {
      "epoch": 2.0554272517321017,
      "grad_norm": 49.84522247314453,
      "learning_rate": 4.7179620483493636e-05,
      "loss": 0.7604,
      "step": 2670
    },
    {
      "epoch": 2.063125481139338,
      "grad_norm": 17.73415756225586,
      "learning_rate": 4.716662334286457e-05,
      "loss": 0.7963,
      "step": 2680
    },
    {
      "epoch": 2.0708237105465743,
      "grad_norm": 16.671457290649414,
      "learning_rate": 4.715362620223551e-05,
      "loss": 0.8546,
      "step": 2690
    },
    {
      "epoch": 2.0785219399538106,
      "grad_norm": 15.332406044006348,
      "learning_rate": 4.714062906160645e-05,
      "loss": 0.784,
      "step": 2700
    },
    {
      "epoch": 2.086220169361047,
      "grad_norm": 76.87459564208984,
      "learning_rate": 4.712763192097739e-05,
      "loss": 0.9221,
      "step": 2710
    },
    {
      "epoch": 2.0939183987682832,
      "grad_norm": 21.800127029418945,
      "learning_rate": 4.7114634780348324e-05,
      "loss": 1.0555,
      "step": 2720
    },
    {
      "epoch": 2.1016166281755195,
      "grad_norm": 31.496492385864258,
      "learning_rate": 4.7101637639719266e-05,
      "loss": 0.8673,
      "step": 2730
    },
    {
      "epoch": 2.109314857582756,
      "grad_norm": 9.187371253967285,
      "learning_rate": 4.70886404990902e-05,
      "loss": 0.9217,
      "step": 2740
    },
    {
      "epoch": 2.117013086989992,
      "grad_norm": 19.336244583129883,
      "learning_rate": 4.707564335846114e-05,
      "loss": 0.8739,
      "step": 2750
    },
    {
      "epoch": 2.1247113163972284,
      "grad_norm": 28.658281326293945,
      "learning_rate": 4.706264621783208e-05,
      "loss": 0.8937,
      "step": 2760
    },
    {
      "epoch": 2.132409545804465,
      "grad_norm": 32.35395431518555,
      "learning_rate": 4.704964907720302e-05,
      "loss": 0.9527,
      "step": 2770
    },
    {
      "epoch": 2.1401077752117015,
      "grad_norm": 39.60884094238281,
      "learning_rate": 4.7036651936573955e-05,
      "loss": 0.9266,
      "step": 2780
    },
    {
      "epoch": 2.147806004618938,
      "grad_norm": 16.985599517822266,
      "learning_rate": 4.7023654795944896e-05,
      "loss": 0.9875,
      "step": 2790
    },
    {
      "epoch": 2.155504234026174,
      "grad_norm": 14.943406105041504,
      "learning_rate": 4.701065765531583e-05,
      "loss": 0.8371,
      "step": 2800
    },
    {
      "epoch": 2.1632024634334104,
      "grad_norm": 18.414188385009766,
      "learning_rate": 4.699766051468677e-05,
      "loss": 0.8526,
      "step": 2810
    },
    {
      "epoch": 2.1709006928406467,
      "grad_norm": 12.915871620178223,
      "learning_rate": 4.698466337405771e-05,
      "loss": 0.9252,
      "step": 2820
    },
    {
      "epoch": 2.178598922247883,
      "grad_norm": 38.03502655029297,
      "learning_rate": 4.697166623342864e-05,
      "loss": 0.8437,
      "step": 2830
    },
    {
      "epoch": 2.1862971516551193,
      "grad_norm": 163.22010803222656,
      "learning_rate": 4.6958669092799585e-05,
      "loss": 0.8906,
      "step": 2840
    },
    {
      "epoch": 2.1939953810623556,
      "grad_norm": 15.631921768188477,
      "learning_rate": 4.694567195217052e-05,
      "loss": 0.9374,
      "step": 2850
    },
    {
      "epoch": 2.201693610469592,
      "grad_norm": 91.86471557617188,
      "learning_rate": 4.693267481154146e-05,
      "loss": 0.895,
      "step": 2860
    },
    {
      "epoch": 2.209391839876828,
      "grad_norm": 43.340171813964844,
      "learning_rate": 4.69196776709124e-05,
      "loss": 0.9779,
      "step": 2870
    },
    {
      "epoch": 2.2170900692840645,
      "grad_norm": 17.621532440185547,
      "learning_rate": 4.6906680530283345e-05,
      "loss": 0.9628,
      "step": 2880
    },
    {
      "epoch": 2.2247882986913012,
      "grad_norm": 22.588891983032227,
      "learning_rate": 4.689368338965428e-05,
      "loss": 0.8364,
      "step": 2890
    },
    {
      "epoch": 2.2324865280985375,
      "grad_norm": 17.839075088500977,
      "learning_rate": 4.688068624902522e-05,
      "loss": 0.8933,
      "step": 2900
    },
    {
      "epoch": 2.240184757505774,
      "grad_norm": 63.97893524169922,
      "learning_rate": 4.6867689108396157e-05,
      "loss": 0.8879,
      "step": 2910
    },
    {
      "epoch": 2.24788298691301,
      "grad_norm": 25.88020133972168,
      "learning_rate": 4.685469196776709e-05,
      "loss": 1.1249,
      "step": 2920
    },
    {
      "epoch": 2.2555812163202464,
      "grad_norm": 61.59385299682617,
      "learning_rate": 4.684169482713803e-05,
      "loss": 0.9538,
      "step": 2930
    },
    {
      "epoch": 2.2632794457274827,
      "grad_norm": 313.2900390625,
      "learning_rate": 4.682869768650897e-05,
      "loss": 0.875,
      "step": 2940
    },
    {
      "epoch": 2.270977675134719,
      "grad_norm": 24.295677185058594,
      "learning_rate": 4.681570054587991e-05,
      "loss": 1.0275,
      "step": 2950
    },
    {
      "epoch": 2.2786759045419553,
      "grad_norm": 20.744089126586914,
      "learning_rate": 4.6802703405250845e-05,
      "loss": 0.9133,
      "step": 2960
    },
    {
      "epoch": 2.2863741339491916,
      "grad_norm": 52.287078857421875,
      "learning_rate": 4.678970626462179e-05,
      "loss": 0.7897,
      "step": 2970
    },
    {
      "epoch": 2.294072363356428,
      "grad_norm": 21.226224899291992,
      "learning_rate": 4.677670912399272e-05,
      "loss": 0.9604,
      "step": 2980
    },
    {
      "epoch": 2.3017705927636642,
      "grad_norm": 12.787373542785645,
      "learning_rate": 4.6763711983363663e-05,
      "loss": 0.8511,
      "step": 2990
    },
    {
      "epoch": 2.3094688221709005,
      "grad_norm": 22.66034507751465,
      "learning_rate": 4.67507148427346e-05,
      "loss": 1.0062,
      "step": 3000
    },
    {
      "epoch": 2.3094688221709005,
      "eval_accuracy": 0.588144726712856,
      "eval_f1": 0.5671167090208166,
      "eval_loss": 0.9636054039001465,
      "eval_precision": 0.5876103570700653,
      "eval_recall": 0.588144726712856,
      "eval_runtime": 7.9352,
      "eval_samples_per_second": 327.401,
      "eval_steps_per_second": 40.957,
      "step": 3000
    }
  ],
  "logging_steps": 10,
  "max_steps": 38970,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 500,
  "total_flos": 277929582871200.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}