{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.4171270718232045,
  "eval_steps": 500,
  "global_step": 3500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9884898710865566e-05,
      "loss": 1.8029,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.976979742173112e-05,
      "loss": 1.7809,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.965469613259669e-05,
      "loss": 1.7211,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.953959484346225e-05,
      "loss": 1.6942,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.942449355432781e-05,
      "loss": 1.7155,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.930939226519337e-05,
      "loss": 1.6549,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.919429097605893e-05,
      "loss": 1.6162,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9079189686924495e-05,
      "loss": 1.6446,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.896408839779006e-05,
      "loss": 1.6022,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.884898710865562e-05,
      "loss": 1.6405,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.873388581952118e-05,
      "loss": 1.5943,
      "step": 110
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.861878453038674e-05,
      "loss": 1.6267,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8503683241252305e-05,
      "loss": 1.5846,
      "step": 130
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.838858195211787e-05,
      "loss": 1.63,
      "step": 140
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8273480662983425e-05,
      "loss": 1.5956,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.815837937384899e-05,
      "loss": 1.6277,
      "step": 160
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.804327808471455e-05,
      "loss": 1.6073,
      "step": 170
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7928176795580114e-05,
      "loss": 1.6069,
      "step": 180
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.781307550644568e-05,
      "loss": 1.5771,
      "step": 190
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7697974217311234e-05,
      "loss": 1.5908,
      "step": 200
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.75828729281768e-05,
      "loss": 1.6272,
      "step": 210
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.746777163904236e-05,
      "loss": 1.5791,
      "step": 220
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7352670349907924e-05,
      "loss": 1.6181,
      "step": 230
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.723756906077349e-05,
      "loss": 1.5601,
      "step": 240
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7122467771639044e-05,
      "loss": 1.6027,
      "step": 250
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.700736648250461e-05,
      "loss": 1.5406,
      "step": 260
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6892265193370164e-05,
      "loss": 1.5931,
      "step": 270
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6777163904235734e-05,
      "loss": 1.5754,
      "step": 280
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.666206261510129e-05,
      "loss": 1.6021,
      "step": 290
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6546961325966854e-05,
      "loss": 1.612,
      "step": 300
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.643186003683242e-05,
      "loss": 1.6186,
      "step": 310
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6316758747697973e-05,
      "loss": 1.5881,
      "step": 320
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6201657458563544e-05,
      "loss": 1.5798,
      "step": 330
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.60865561694291e-05,
      "loss": 1.6227,
      "step": 340
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.597145488029466e-05,
      "loss": 1.5912,
      "step": 350
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.585635359116022e-05,
      "loss": 1.5674,
      "step": 360
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.574125230202578e-05,
      "loss": 1.6119,
      "step": 370
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5626151012891346e-05,
      "loss": 1.6162,
      "step": 380
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.551104972375691e-05,
      "loss": 1.6058,
      "step": 390
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.539594843462247e-05,
      "loss": 1.5731,
      "step": 400
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.528084714548803e-05,
      "loss": 1.5954,
      "step": 410
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.516574585635359e-05,
      "loss": 1.6355,
      "step": 420
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.505064456721915e-05,
      "loss": 1.6298,
      "step": 430
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.493554327808472e-05,
      "loss": 1.5923,
      "step": 440
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4820441988950276e-05,
      "loss": 1.5714,
      "step": 450
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.470534069981584e-05,
      "loss": 1.6087,
      "step": 460
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.45902394106814e-05,
      "loss": 1.5758,
      "step": 470
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.447513812154696e-05,
      "loss": 1.5886,
      "step": 480
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.436003683241253e-05,
      "loss": 1.583,
      "step": 490
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4244935543278086e-05,
      "loss": 1.5785,
      "step": 500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.412983425414365e-05,
      "loss": 1.5999,
      "step": 510
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.401473296500921e-05,
      "loss": 1.5992,
      "step": 520
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.389963167587477e-05,
      "loss": 1.5715,
      "step": 530
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.378453038674034e-05,
      "loss": 1.5646,
      "step": 540
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3669429097605895e-05,
      "loss": 1.6251,
      "step": 550
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.355432780847146e-05,
      "loss": 1.5745,
      "step": 560
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3439226519337015e-05,
      "loss": 1.5911,
      "step": 570
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.332412523020258e-05,
      "loss": 1.587,
      "step": 580
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.320902394106814e-05,
      "loss": 1.5835,
      "step": 590
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3093922651933705e-05,
      "loss": 1.6068,
      "step": 600
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.297882136279927e-05,
      "loss": 1.5803,
      "step": 610
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.2863720073664825e-05,
      "loss": 1.5946,
      "step": 620
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.274861878453039e-05,
      "loss": 1.6005,
      "step": 630
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.263351749539595e-05,
      "loss": 1.6477,
      "step": 640
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2518416206261515e-05,
      "loss": 1.6011,
      "step": 650
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.240331491712707e-05,
      "loss": 1.6011,
      "step": 660
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2288213627992634e-05,
      "loss": 1.6031,
      "step": 670
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.21731123388582e-05,
      "loss": 1.5861,
      "step": 680
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.205801104972376e-05,
      "loss": 1.6027,
      "step": 690
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.1942909760589324e-05,
      "loss": 1.6076,
      "step": 700
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.182780847145488e-05,
      "loss": 1.5768,
      "step": 710
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1712707182320444e-05,
      "loss": 1.5869,
      "step": 720
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1597605893186e-05,
      "loss": 1.5882,
      "step": 730
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.148250460405157e-05,
      "loss": 1.6401,
      "step": 740
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.136740331491713e-05,
      "loss": 1.6014,
      "step": 750
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.125230202578269e-05,
      "loss": 1.5883,
      "step": 760
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1137200736648254e-05,
      "loss": 1.6217,
      "step": 770
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.102209944751381e-05,
      "loss": 1.6338,
      "step": 780
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.090699815837938e-05,
      "loss": 1.6276,
      "step": 790
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.079189686924494e-05,
      "loss": 1.5951,
      "step": 800
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.06767955801105e-05,
      "loss": 1.5975,
      "step": 810
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.056169429097606e-05,
      "loss": 1.5809,
      "step": 820
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.044659300184162e-05,
      "loss": 1.6179,
      "step": 830
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.033149171270719e-05,
      "loss": 1.589,
      "step": 840
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0216390423572746e-05,
      "loss": 1.5645,
      "step": 850
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.010128913443831e-05,
      "loss": 1.6079,
      "step": 860
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9986187845303866e-05,
      "loss": 1.5741,
      "step": 870
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.987108655616943e-05,
      "loss": 1.5888,
      "step": 880
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.975598526703499e-05,
      "loss": 1.6054,
      "step": 890
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9640883977900556e-05,
      "loss": 1.6023,
      "step": 900
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.952578268876612e-05,
      "loss": 1.5759,
      "step": 910
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9410681399631676e-05,
      "loss": 1.5565,
      "step": 920
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.929558011049724e-05,
      "loss": 1.5439,
      "step": 930
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.91804788213628e-05,
      "loss": 1.5901,
      "step": 940
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.9065377532228366e-05,
      "loss": 1.6208,
      "step": 950
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.895027624309392e-05,
      "loss": 1.6171,
      "step": 960
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8835174953959486e-05,
      "loss": 1.6366,
      "step": 970
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.872007366482505e-05,
      "loss": 1.6199,
      "step": 980
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.860497237569061e-05,
      "loss": 1.6107,
      "step": 990
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8489871086556175e-05,
      "loss": 1.6071,
      "step": 1000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.837476979742173e-05,
      "loss": 1.5721,
      "step": 1010
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8259668508287295e-05,
      "loss": 1.588,
      "step": 1020
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.814456721915285e-05,
      "loss": 1.5894,
      "step": 1030
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8029465930018415e-05,
      "loss": 1.5975,
      "step": 1040
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.791436464088398e-05,
      "loss": 1.62,
      "step": 1050
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.779926335174954e-05,
      "loss": 1.5392,
      "step": 1060
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7684162062615105e-05,
      "loss": 1.6261,
      "step": 1070
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.756906077348066e-05,
      "loss": 1.5745,
      "step": 1080
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7453959484346225e-05,
      "loss": 1.6106,
      "step": 1090
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.733885819521179e-05,
      "loss": 1.6011,
      "step": 1100
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.722375690607735e-05,
      "loss": 1.6289,
      "step": 1110
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.710865561694291e-05,
      "loss": 1.573,
      "step": 1120
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.699355432780847e-05,
      "loss": 1.5707,
      "step": 1130
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6878453038674034e-05,
      "loss": 1.5422,
      "step": 1140
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.67633517495396e-05,
      "loss": 1.5895,
      "step": 1150
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.664825046040516e-05,
      "loss": 1.5934,
      "step": 1160
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.653314917127072e-05,
      "loss": 1.5843,
      "step": 1170
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.641804788213628e-05,
      "loss": 1.5927,
      "step": 1180
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6302946593001844e-05,
      "loss": 1.6082,
      "step": 1190
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.618784530386741e-05,
      "loss": 1.5681,
      "step": 1200
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.607274401473297e-05,
      "loss": 1.6185,
      "step": 1210
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.595764272559853e-05,
      "loss": 1.5838,
      "step": 1220
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.584254143646409e-05,
      "loss": 1.6025,
      "step": 1230
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.572744014732965e-05,
      "loss": 1.5581,
      "step": 1240
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.561233885819522e-05,
      "loss": 1.5908,
      "step": 1250
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5497237569060774e-05,
      "loss": 1.5604,
      "step": 1260
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.538213627992634e-05,
      "loss": 1.5891,
      "step": 1270
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.52670349907919e-05,
      "loss": 1.5723,
      "step": 1280
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.515193370165746e-05,
      "loss": 1.5808,
      "step": 1290
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.503683241252303e-05,
      "loss": 1.6221,
      "step": 1300
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.492173112338858e-05,
      "loss": 1.5888,
      "step": 1310
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.4806629834254147e-05,
      "loss": 1.5776,
      "step": 1320
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.46915285451197e-05,
      "loss": 1.5566,
      "step": 1330
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4576427255985266e-05,
      "loss": 1.6075,
      "step": 1340
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.446132596685083e-05,
      "loss": 1.5859,
      "step": 1350
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.434622467771639e-05,
      "loss": 1.5441,
      "step": 1360
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4231123388581956e-05,
      "loss": 1.6225,
      "step": 1370
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.411602209944751e-05,
      "loss": 1.5831,
      "step": 1380
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.4000920810313076e-05,
      "loss": 1.5368,
      "step": 1390
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.388581952117864e-05,
      "loss": 1.5969,
      "step": 1400
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.37707182320442e-05,
      "loss": 1.5595,
      "step": 1410
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.365561694290976e-05,
      "loss": 1.6232,
      "step": 1420
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.354051565377532e-05,
      "loss": 1.552,
      "step": 1430
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3425414364640886e-05,
      "loss": 1.5653,
      "step": 1440
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.331031307550645e-05,
      "loss": 1.5995,
      "step": 1450
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.319521178637201e-05,
      "loss": 1.6068,
      "step": 1460
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.308011049723757e-05,
      "loss": 1.5713,
      "step": 1470
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.296500920810313e-05,
      "loss": 1.5737,
      "step": 1480
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2849907918968695e-05,
      "loss": 1.5582,
      "step": 1490
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.273480662983426e-05,
      "loss": 1.5205,
      "step": 1500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.261970534069982e-05,
      "loss": 1.5837,
      "step": 1510
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.250460405156538e-05,
      "loss": 1.5892,
      "step": 1520
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.238950276243094e-05,
      "loss": 1.5771,
      "step": 1530
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.22744014732965e-05,
      "loss": 1.5562,
      "step": 1540
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.215930018416207e-05,
      "loss": 1.5851,
      "step": 1550
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.2044198895027625e-05,
      "loss": 1.5894,
      "step": 1560
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.192909760589319e-05,
      "loss": 1.5737,
      "step": 1570
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.181399631675875e-05,
      "loss": 1.5666,
      "step": 1580
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.169889502762431e-05,
      "loss": 1.6052,
      "step": 1590
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.158379373848988e-05,
      "loss": 1.6086,
      "step": 1600
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.1468692449355434e-05,
      "loss": 1.6181,
      "step": 1610
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.1353591160221e-05,
      "loss": 1.5655,
      "step": 1620
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1238489871086554e-05,
      "loss": 1.5748,
      "step": 1630
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.112338858195212e-05,
      "loss": 1.6005,
      "step": 1640
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.100828729281768e-05,
      "loss": 1.5932,
      "step": 1650
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.0893186003683244e-05,
      "loss": 1.5521,
      "step": 1660
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.077808471454881e-05,
      "loss": 1.5873,
      "step": 1670
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0662983425414364e-05,
      "loss": 1.611,
      "step": 1680
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.054788213627993e-05,
      "loss": 1.5973,
      "step": 1690
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0432780847145487e-05,
      "loss": 1.6008,
      "step": 1700
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0317679558011054e-05,
      "loss": 1.5627,
      "step": 1710
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0202578268876614e-05,
      "loss": 1.5959,
      "step": 1720
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0087476979742174e-05,
      "loss": 1.5917,
      "step": 1730
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9972375690607734e-05,
      "loss": 1.6139,
      "step": 1740
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9857274401473297e-05,
      "loss": 1.5626,
      "step": 1750
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.974217311233886e-05,
      "loss": 1.5806,
      "step": 1760
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9627071823204423e-05,
      "loss": 1.5868,
      "step": 1770
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9511970534069983e-05,
      "loss": 1.5841,
      "step": 1780
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9396869244935543e-05,
      "loss": 1.5898,
      "step": 1790
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9281767955801103e-05,
      "loss": 1.5813,
      "step": 1800
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.916666666666667e-05,
      "loss": 1.5816,
      "step": 1810
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.905156537753223e-05,
      "loss": 1.6343,
      "step": 1820
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.893646408839779e-05,
      "loss": 1.5998,
      "step": 1830
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8821362799263353e-05,
      "loss": 1.5399,
      "step": 1840
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8706261510128913e-05,
      "loss": 1.5848,
      "step": 1850
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.859116022099448e-05,
      "loss": 1.5968,
      "step": 1860
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.847605893186004e-05,
      "loss": 1.582,
      "step": 1870
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.83609576427256e-05,
      "loss": 1.5652,
      "step": 1880
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.824585635359116e-05,
      "loss": 1.5522,
      "step": 1890
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8130755064456722e-05,
      "loss": 1.5957,
      "step": 1900
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8015653775322286e-05,
      "loss": 1.5799,
      "step": 1910
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.790055248618785e-05,
      "loss": 1.5692,
      "step": 1920
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.778545119705341e-05,
      "loss": 1.579,
      "step": 1930
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.767034990791897e-05,
      "loss": 1.5635,
      "step": 1940
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.755524861878453e-05,
      "loss": 1.5765,
      "step": 1950
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7440147329650095e-05,
      "loss": 1.5705,
      "step": 1960
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7325046040515655e-05,
      "loss": 1.5626,
      "step": 1970
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7209944751381215e-05,
      "loss": 1.6164,
      "step": 1980
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.709484346224678e-05,
      "loss": 1.5834,
      "step": 1990
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.697974217311234e-05,
      "loss": 1.5571,
      "step": 2000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6864640883977905e-05,
      "loss": 1.5971,
      "step": 2010
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6749539594843465e-05,
      "loss": 1.6115,
      "step": 2020
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6634438305709025e-05,
      "loss": 1.573,
      "step": 2030
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6519337016574585e-05,
      "loss": 1.5956,
      "step": 2040
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6404235727440148e-05,
      "loss": 1.5467,
      "step": 2050
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.628913443830571e-05,
      "loss": 1.5736,
      "step": 2060
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6174033149171275e-05,
      "loss": 1.5593,
      "step": 2070
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.6058931860036835e-05,
      "loss": 1.5953,
      "step": 2080
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5943830570902394e-05,
      "loss": 1.569,
      "step": 2090
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5828729281767954e-05,
      "loss": 1.5567,
      "step": 2100
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.571362799263352e-05,
      "loss": 1.6498,
      "step": 2110
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.559852670349908e-05,
      "loss": 1.6232,
      "step": 2120
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.548342541436464e-05,
      "loss": 1.5408,
      "step": 2130
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5368324125230204e-05,
      "loss": 1.5992,
      "step": 2140
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5253222836095764e-05,
      "loss": 1.5964,
      "step": 2150
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.513812154696133e-05,
      "loss": 1.5716,
      "step": 2160
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.502302025782689e-05,
      "loss": 1.5663,
      "step": 2170
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.490791896869245e-05,
      "loss": 1.6271,
      "step": 2180
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.479281767955801e-05,
      "loss": 1.5547,
      "step": 2190
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.4677716390423574e-05,
      "loss": 1.6519,
      "step": 2200
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4562615101289137e-05,
      "loss": 1.6163,
      "step": 2210
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4447513812154697e-05,
      "loss": 1.5848,
      "step": 2220
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.433241252302026e-05,
      "loss": 1.6144,
      "step": 2230
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.421731123388582e-05,
      "loss": 1.5702,
      "step": 2240
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4102209944751383e-05,
      "loss": 1.6093,
      "step": 2250
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.3987108655616943e-05,
      "loss": 1.5885,
      "step": 2260
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3872007366482503e-05,
      "loss": 1.5403,
      "step": 2270
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3756906077348066e-05,
      "loss": 1.5285,
      "step": 2280
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.364180478821363e-05,
      "loss": 1.6066,
      "step": 2290
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3526703499079193e-05,
      "loss": 1.5911,
      "step": 2300
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3411602209944753e-05,
      "loss": 1.5668,
      "step": 2310
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3296500920810313e-05,
      "loss": 1.6309,
      "step": 2320
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3181399631675876e-05,
      "loss": 1.5681,
      "step": 2330
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3066298342541436e-05,
      "loss": 1.5751,
      "step": 2340
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.2951197053407e-05,
      "loss": 1.5797,
      "step": 2350
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2836095764272563e-05,
      "loss": 1.5651,
      "step": 2360
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2720994475138122e-05,
      "loss": 1.6188,
      "step": 2370
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2605893186003686e-05,
      "loss": 1.5846,
      "step": 2380
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2490791896869246e-05,
      "loss": 1.5483,
      "step": 2390
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.237569060773481e-05,
      "loss": 1.5521,
      "step": 2400
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.226058931860037e-05,
      "loss": 1.5387,
      "step": 2410
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.214548802946593e-05,
      "loss": 1.5602,
      "step": 2420
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.2030386740331492e-05,
      "loss": 1.5724,
      "step": 2430
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1915285451197055e-05,
      "loss": 1.5711,
      "step": 2440
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.180018416206262e-05,
      "loss": 1.5658,
      "step": 2450
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.168508287292818e-05,
      "loss": 1.6041,
      "step": 2460
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.156998158379374e-05,
      "loss": 1.6189,
      "step": 2470
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1454880294659302e-05,
      "loss": 1.6388,
      "step": 2480
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.133977900552486e-05,
      "loss": 1.5726,
      "step": 2490
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1224677716390425e-05,
      "loss": 1.5748,
      "step": 2500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1109576427255985e-05,
      "loss": 1.5393,
      "step": 2510
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0994475138121548e-05,
      "loss": 1.5535,
      "step": 2520
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.087937384898711e-05,
      "loss": 1.6028,
      "step": 2530
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.076427255985267e-05,
      "loss": 1.6166,
      "step": 2540
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.064917127071823e-05,
      "loss": 1.6016,
      "step": 2550
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0534069981583794e-05,
      "loss": 1.5341,
      "step": 2560
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0418968692449354e-05,
      "loss": 1.5805,
      "step": 2570
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0303867403314918e-05,
      "loss": 1.5787,
      "step": 2580
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.018876611418048e-05,
      "loss": 1.6065,
      "step": 2590
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.007366482504604e-05,
      "loss": 1.5751,
      "step": 2600
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9958563535911604e-05,
      "loss": 1.6446,
      "step": 2610
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9843462246777164e-05,
      "loss": 1.5727,
      "step": 2620
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9728360957642727e-05,
      "loss": 1.5928,
      "step": 2630
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9613259668508287e-05,
      "loss": 1.5738,
      "step": 2640
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9498158379373847e-05,
      "loss": 1.6051,
      "step": 2650
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.938305709023941e-05,
      "loss": 1.6057,
      "step": 2660
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9267955801104974e-05,
      "loss": 1.5234,
      "step": 2670
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9152854511970537e-05,
      "loss": 1.6126,
      "step": 2680
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9037753222836097e-05,
      "loss": 1.5784,
      "step": 2690
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8922651933701657e-05,
      "loss": 1.5575,
      "step": 2700
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.880755064456722e-05,
      "loss": 1.5637,
      "step": 2710
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.869244935543278e-05,
      "loss": 1.5491,
      "step": 2720
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8577348066298343e-05,
      "loss": 1.5797,
      "step": 2730
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8462246777163907e-05,
      "loss": 1.5906,
      "step": 2740
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8347145488029466e-05,
      "loss": 1.5797,
      "step": 2750
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.823204419889503e-05,
      "loss": 1.5791,
      "step": 2760
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.811694290976059e-05,
      "loss": 1.5562,
      "step": 2770
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8001841620626153e-05,
      "loss": 1.5847,
      "step": 2780
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7886740331491713e-05,
      "loss": 1.5813,
      "step": 2790
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7771639042357273e-05,
      "loss": 1.5668,
      "step": 2800
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7656537753222836e-05,
      "loss": 1.5273,
      "step": 2810
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.75414364640884e-05,
      "loss": 1.6037,
      "step": 2820
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7426335174953963e-05,
      "loss": 1.5607,
      "step": 2830
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7311233885819523e-05,
      "loss": 1.5772,
      "step": 2840
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7196132596685082e-05,
      "loss": 1.5402,
      "step": 2850
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7081031307550646e-05,
      "loss": 1.5883,
      "step": 2860
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6965930018416206e-05,
      "loss": 1.5583,
      "step": 2870
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.685082872928177e-05,
      "loss": 1.6153,
      "step": 2880
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6735727440147332e-05,
      "loss": 1.5841,
      "step": 2890
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6620626151012892e-05,
      "loss": 1.5897,
      "step": 2900
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6505524861878455e-05,
      "loss": 1.5762,
      "step": 2910
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6390423572744015e-05,
      "loss": 1.6014,
      "step": 2920
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6275322283609575e-05,
      "loss": 1.5949,
      "step": 2930
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.616022099447514e-05,
      "loss": 1.5671,
      "step": 2940
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.60451197053407e-05,
      "loss": 1.5655,
      "step": 2950
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.593001841620626e-05,
      "loss": 1.5941,
      "step": 2960
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5814917127071825e-05,
      "loss": 1.6386,
      "step": 2970
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5699815837937385e-05,
      "loss": 1.5989,
      "step": 2980
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5584714548802948e-05,
      "loss": 1.6067,
      "step": 2990
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5469613259668508e-05,
      "loss": 1.5562,
      "step": 3000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.535451197053407e-05,
      "loss": 1.6221,
      "step": 3010
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5239410681399633e-05,
      "loss": 1.5723,
      "step": 3020
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5124309392265193e-05,
      "loss": 1.5636,
      "step": 3030
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5009208103130756e-05,
      "loss": 1.5866,
      "step": 3040
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4894106813996318e-05,
      "loss": 1.5698,
      "step": 3050
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4779005524861881e-05,
      "loss": 1.5909,
      "step": 3060
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4663904235727441e-05,
      "loss": 1.5857,
      "step": 3070
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4548802946593e-05,
      "loss": 1.5646,
      "step": 3080
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4433701657458564e-05,
      "loss": 1.584,
      "step": 3090
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4318600368324126e-05,
      "loss": 1.5629,
      "step": 3100
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4203499079189689e-05,
      "loss": 1.5359,
      "step": 3110
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4088397790055249e-05,
      "loss": 1.5562,
      "step": 3120
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.397329650092081e-05,
      "loss": 1.5617,
      "step": 3130
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3858195211786374e-05,
      "loss": 1.5942,
      "step": 3140
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3743093922651934e-05,
      "loss": 1.5608,
      "step": 3150
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3627992633517497e-05,
      "loss": 1.6006,
      "step": 3160
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3512891344383059e-05,
      "loss": 1.6083,
      "step": 3170
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3397790055248618e-05,
      "loss": 1.5748,
      "step": 3180
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3282688766114182e-05,
      "loss": 1.545,
      "step": 3190
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3167587476979742e-05,
      "loss": 1.5444,
      "step": 3200
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3052486187845307e-05,
      "loss": 1.5743,
      "step": 3210
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2937384898710867e-05,
      "loss": 1.5568,
      "step": 3220
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2822283609576426e-05,
      "loss": 1.5459,
      "step": 3230
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.270718232044199e-05,
      "loss": 1.5682,
      "step": 3240
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2592081031307551e-05,
      "loss": 1.5531,
      "step": 3250
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2476979742173113e-05,
      "loss": 1.5779,
      "step": 3260
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2361878453038675e-05,
      "loss": 1.5651,
      "step": 3270
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2246777163904238e-05,
      "loss": 1.6048,
      "step": 3280
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2131675874769798e-05,
      "loss": 1.5508,
      "step": 3290
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.201657458563536e-05,
      "loss": 1.5763,
      "step": 3300
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1901473296500921e-05,
      "loss": 1.5533,
      "step": 3310
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1786372007366484e-05,
      "loss": 1.5837,
      "step": 3320
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1671270718232044e-05,
      "loss": 1.5725,
      "step": 3330
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1556169429097606e-05,
      "loss": 1.5419,
      "step": 3340
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1441068139963167e-05,
      "loss": 1.5768,
      "step": 3350
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.132596685082873e-05,
      "loss": 1.5989,
      "step": 3360
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1210865561694292e-05,
      "loss": 1.5842,
      "step": 3370
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1095764272559852e-05,
      "loss": 1.5623,
      "step": 3380
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.0980662983425415e-05,
      "loss": 1.6057,
      "step": 3390
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0865561694290977e-05,
      "loss": 1.5817,
      "step": 3400
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0750460405156539e-05,
      "loss": 1.552,
      "step": 3410
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.06353591160221e-05,
      "loss": 1.6205,
      "step": 3420
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0520257826887662e-05,
      "loss": 1.539,
      "step": 3430
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0405156537753223e-05,
      "loss": 1.5936,
      "step": 3440
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0290055248618785e-05,
      "loss": 1.5986,
      "step": 3450
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0174953959484347e-05,
      "loss": 1.5493,
      "step": 3460
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.005985267034991e-05,
      "loss": 1.5675,
      "step": 3470
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.94475138121547e-06,
      "loss": 1.5784,
      "step": 3480
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.829650092081031e-06,
      "loss": 1.5754,
      "step": 3490
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.714548802946593e-06,
      "loss": 1.5657,
      "step": 3500
    }
  ],
  "logging_steps": 10,
  "max_steps": 4344,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.2111388395005542e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}