{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.3192612137203166,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.989006156552331e-05,
      "loss": 1.8193,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.978012313104662e-05,
      "loss": 1.7656,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.967018469656992e-05,
      "loss": 1.7573,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.956024626209323e-05,
      "loss": 1.6758,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9450307827616535e-05,
      "loss": 1.5776,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.934036939313985e-05,
      "loss": 1.615,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9230430958663146e-05,
      "loss": 1.5378,
      "step": 70
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.912049252418646e-05,
      "loss": 1.6188,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.901055408970976e-05,
      "loss": 1.5489,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8900615655233075e-05,
      "loss": 1.5724,
      "step": 100
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.879067722075638e-05,
      "loss": 1.5262,
      "step": 110
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8680738786279686e-05,
      "loss": 1.5224,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.857080035180299e-05,
      "loss": 1.5176,
      "step": 130
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.84608619173263e-05,
      "loss": 1.5263,
      "step": 140
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.835092348284961e-05,
      "loss": 1.5832,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.824098504837291e-05,
      "loss": 1.5403,
      "step": 160
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.813104661389622e-05,
      "loss": 1.5689,
      "step": 170
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.802110817941953e-05,
      "loss": 1.5389,
      "step": 180
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7911169744942836e-05,
      "loss": 1.5593,
      "step": 190
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.780123131046614e-05,
      "loss": 1.52,
      "step": 200
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7691292875989446e-05,
      "loss": 1.6009,
      "step": 210
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.758135444151275e-05,
      "loss": 1.5612,
      "step": 220
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7471416007036063e-05,
      "loss": 1.5532,
      "step": 230
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.736147757255937e-05,
      "loss": 1.5636,
      "step": 240
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7251539138082674e-05,
      "loss": 1.56,
      "step": 250
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.714160070360598e-05,
      "loss": 1.5406,
      "step": 260
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.703166226912929e-05,
      "loss": 1.4851,
      "step": 270
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6921723834652596e-05,
      "loss": 1.5333,
      "step": 280
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.681178540017591e-05,
      "loss": 1.5684,
      "step": 290
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.670184696569921e-05,
      "loss": 1.5636,
      "step": 300
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.659190853122252e-05,
      "loss": 1.5434,
      "step": 310
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6481970096745824e-05,
      "loss": 1.5002,
      "step": 320
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6372031662269136e-05,
      "loss": 1.5413,
      "step": 330
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6262093227792435e-05,
      "loss": 1.5275,
      "step": 340
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.615215479331575e-05,
      "loss": 1.508,
      "step": 350
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.604221635883905e-05,
      "loss": 1.5033,
      "step": 360
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.593227792436236e-05,
      "loss": 1.5416,
      "step": 370
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.582233948988567e-05,
      "loss": 1.5092,
      "step": 380
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5712401055408974e-05,
      "loss": 1.5605,
      "step": 390
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.560246262093228e-05,
      "loss": 1.5624,
      "step": 400
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5492524186455585e-05,
      "loss": 1.5394,
      "step": 410
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.53825857519789e-05,
      "loss": 1.476,
      "step": 420
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.52726473175022e-05,
      "loss": 1.5367,
      "step": 430
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.516270888302551e-05,
      "loss": 1.5041,
      "step": 440
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.505277044854881e-05,
      "loss": 1.5342,
      "step": 450
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4942832014072125e-05,
      "loss": 1.5068,
      "step": 460
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.483289357959543e-05,
      "loss": 1.4912,
      "step": 470
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4722955145118735e-05,
      "loss": 1.4893,
      "step": 480
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.461301671064204e-05,
      "loss": 1.5338,
      "step": 490
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.450307827616535e-05,
      "loss": 1.5258,
      "step": 500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.439313984168866e-05,
      "loss": 1.5373,
      "step": 510
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.428320140721196e-05,
      "loss": 1.4972,
      "step": 520
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.417326297273527e-05,
      "loss": 1.4868,
      "step": 530
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.406332453825858e-05,
      "loss": 1.5167,
      "step": 540
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3953386103781885e-05,
      "loss": 1.5242,
      "step": 550
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.384344766930519e-05,
      "loss": 1.5309,
      "step": 560
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3733509234828496e-05,
      "loss": 1.5091,
      "step": 570
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.36235708003518e-05,
      "loss": 1.5041,
      "step": 580
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.351363236587511e-05,
      "loss": 1.4934,
      "step": 590
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.340369393139842e-05,
      "loss": 1.5113,
      "step": 600
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.329375549692173e-05,
      "loss": 1.4899,
      "step": 610
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.318381706244503e-05,
      "loss": 1.5019,
      "step": 620
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.307387862796834e-05,
      "loss": 1.5019,
      "step": 630
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.2963940193491646e-05,
      "loss": 1.5331,
      "step": 640
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.285400175901496e-05,
      "loss": 1.5444,
      "step": 650
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2744063324538256e-05,
      "loss": 1.527,
      "step": 660
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.263412489006157e-05,
      "loss": 1.5329,
      "step": 670
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2524186455584874e-05,
      "loss": 1.5572,
      "step": 680
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2414248021108186e-05,
      "loss": 1.53,
      "step": 690
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.230430958663149e-05,
      "loss": 1.4891,
      "step": 700
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2194371152154796e-05,
      "loss": 1.5004,
      "step": 710
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.20844327176781e-05,
      "loss": 1.5134,
      "step": 720
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.1974494283201407e-05,
      "loss": 1.5081,
      "step": 730
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.186455584872472e-05,
      "loss": 1.5006,
      "step": 740
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1754617414248024e-05,
      "loss": 1.515,
      "step": 750
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.164467897977133e-05,
      "loss": 1.4862,
      "step": 760
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1534740545294634e-05,
      "loss": 1.5196,
      "step": 770
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1424802110817946e-05,
      "loss": 1.4908,
      "step": 780
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.131486367634125e-05,
      "loss": 1.5443,
      "step": 790
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.120492524186456e-05,
      "loss": 1.5457,
      "step": 800
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.109498680738786e-05,
      "loss": 1.5616,
      "step": 810
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0985048372911174e-05,
      "loss": 1.5537,
      "step": 820
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.087510993843448e-05,
      "loss": 1.5206,
      "step": 830
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0765171503957784e-05,
      "loss": 1.4923,
      "step": 840
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.065523306948109e-05,
      "loss": 1.4721,
      "step": 850
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.05452946350044e-05,
      "loss": 1.5357,
      "step": 860
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.043535620052771e-05,
      "loss": 1.5163,
      "step": 870
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.032541776605102e-05,
      "loss": 1.5406,
      "step": 880
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.021547933157432e-05,
      "loss": 1.5151,
      "step": 890
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.010554089709763e-05,
      "loss": 1.5482,
      "step": 900
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9995602462620935e-05,
      "loss": 1.487,
      "step": 910
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.988566402814424e-05,
      "loss": 1.5559,
      "step": 920
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9775725593667545e-05,
      "loss": 1.5085,
      "step": 930
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.966578715919085e-05,
      "loss": 1.5091,
      "step": 940
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.955584872471416e-05,
      "loss": 1.512,
      "step": 950
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.944591029023747e-05,
      "loss": 1.4824,
      "step": 960
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.933597185576078e-05,
      "loss": 1.4935,
      "step": 970
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.922603342128408e-05,
      "loss": 1.5034,
      "step": 980
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.911609498680739e-05,
      "loss": 1.5449,
      "step": 990
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.9006156552330695e-05,
      "loss": 1.481,
      "step": 1000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.889621811785401e-05,
      "loss": 1.554,
      "step": 1010
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8786279683377306e-05,
      "loss": 1.5155,
      "step": 1020
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.867634124890062e-05,
      "loss": 1.5023,
      "step": 1030
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.856640281442392e-05,
      "loss": 1.5311,
      "step": 1040
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8456464379947235e-05,
      "loss": 1.554,
      "step": 1050
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.834652594547054e-05,
      "loss": 1.5175,
      "step": 1060
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8236587510993846e-05,
      "loss": 1.499,
      "step": 1070
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.812664907651715e-05,
      "loss": 1.514,
      "step": 1080
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.801671064204046e-05,
      "loss": 1.5116,
      "step": 1090
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.790677220756377e-05,
      "loss": 1.5545,
      "step": 1100
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.779683377308707e-05,
      "loss": 1.5126,
      "step": 1110
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.768689533861038e-05,
      "loss": 1.5129,
      "step": 1120
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7576956904133684e-05,
      "loss": 1.5148,
      "step": 1130
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7467018469656996e-05,
      "loss": 1.5715,
      "step": 1140
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.73570800351803e-05,
      "loss": 1.5227,
      "step": 1150
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7247141600703606e-05,
      "loss": 1.4697,
      "step": 1160
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.713720316622691e-05,
      "loss": 1.5557,
      "step": 1170
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.7027264731750223e-05,
      "loss": 1.5204,
      "step": 1180
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.691732629727353e-05,
      "loss": 1.5246,
      "step": 1190
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.680738786279684e-05,
      "loss": 1.526,
      "step": 1200
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.669744942832014e-05,
      "loss": 1.5052,
      "step": 1210
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.658751099384345e-05,
      "loss": 1.5383,
      "step": 1220
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6477572559366756e-05,
      "loss": 1.5561,
      "step": 1230
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.636763412489007e-05,
      "loss": 1.5293,
      "step": 1240
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.625769569041337e-05,
      "loss": 1.5146,
      "step": 1250
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.614775725593668e-05,
      "loss": 1.5626,
      "step": 1260
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.6037818821459984e-05,
      "loss": 1.5603,
      "step": 1270
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.592788038698329e-05,
      "loss": 1.538,
      "step": 1280
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.58179419525066e-05,
      "loss": 1.4977,
      "step": 1290
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5708003518029907e-05,
      "loss": 1.4758,
      "step": 1300
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.559806508355321e-05,
      "loss": 1.4657,
      "step": 1310
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.548812664907652e-05,
      "loss": 1.5382,
      "step": 1320
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.537818821459983e-05,
      "loss": 1.4825,
      "step": 1330
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.526824978012313e-05,
      "loss": 1.5182,
      "step": 1340
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.515831134564644e-05,
      "loss": 1.5143,
      "step": 1350
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.5048372911169745e-05,
      "loss": 1.5374,
      "step": 1360
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.493843447669306e-05,
      "loss": 1.5335,
      "step": 1370
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.482849604221636e-05,
      "loss": 1.532,
      "step": 1380
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.471855760773967e-05,
      "loss": 1.5056,
      "step": 1390
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.460861917326297e-05,
      "loss": 1.5248,
      "step": 1400
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4498680738786285e-05,
      "loss": 1.5129,
      "step": 1410
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.438874230430959e-05,
      "loss": 1.5223,
      "step": 1420
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4278803869832895e-05,
      "loss": 1.5236,
      "step": 1430
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.41688654353562e-05,
      "loss": 1.4966,
      "step": 1440
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.405892700087951e-05,
      "loss": 1.5395,
      "step": 1450
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.394898856640282e-05,
      "loss": 1.5559,
      "step": 1460
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.383905013192612e-05,
      "loss": 1.4817,
      "step": 1470
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.372911169744943e-05,
      "loss": 1.4662,
      "step": 1480
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.361917326297273e-05,
      "loss": 1.5073,
      "step": 1490
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3509234828496045e-05,
      "loss": 1.5354,
      "step": 1500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.339929639401935e-05,
      "loss": 1.5353,
      "step": 1510
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3289357959542656e-05,
      "loss": 1.4755,
      "step": 1520
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.317941952506596e-05,
      "loss": 1.5223,
      "step": 1530
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.306948109058927e-05,
      "loss": 1.4958,
      "step": 1540
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.295954265611258e-05,
      "loss": 1.5234,
      "step": 1550
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.284960422163589e-05,
      "loss": 1.5169,
      "step": 1560
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.273966578715919e-05,
      "loss": 1.5107,
      "step": 1570
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.26297273526825e-05,
      "loss": 1.5183,
      "step": 1580
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2519788918205806e-05,
      "loss": 1.4787,
      "step": 1590
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.240985048372912e-05,
      "loss": 1.5123,
      "step": 1600
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2299912049252416e-05,
      "loss": 1.528,
      "step": 1610
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.218997361477573e-05,
      "loss": 1.5125,
      "step": 1620
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.2080035180299034e-05,
      "loss": 1.5148,
      "step": 1630
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.197009674582234e-05,
      "loss": 1.5091,
      "step": 1640
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.186015831134565e-05,
      "loss": 1.4923,
      "step": 1650
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1750219876868956e-05,
      "loss": 1.5544,
      "step": 1660
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.164028144239226e-05,
      "loss": 1.5111,
      "step": 1670
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.1530343007915567e-05,
      "loss": 1.5814,
      "step": 1680
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.142040457343888e-05,
      "loss": 1.4826,
      "step": 1690
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.131046613896218e-05,
      "loss": 1.4994,
      "step": 1700
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.120052770448549e-05,
      "loss": 1.4444,
      "step": 1710
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1090589270008794e-05,
      "loss": 1.5175,
      "step": 1720
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.0980650835532106e-05,
      "loss": 1.4922,
      "step": 1730
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.087071240105541e-05,
      "loss": 1.5155,
      "step": 1740
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.076077396657872e-05,
      "loss": 1.5037,
      "step": 1750
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.065083553210202e-05,
      "loss": 1.5448,
      "step": 1760
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0540897097625334e-05,
      "loss": 1.4882,
      "step": 1770
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.043095866314864e-05,
      "loss": 1.514,
      "step": 1780
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.032102022867194e-05,
      "loss": 1.4848,
      "step": 1790
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.021108179419525e-05,
      "loss": 1.4672,
      "step": 1800
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.010114335971856e-05,
      "loss": 1.5191,
      "step": 1810
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9991204925241867e-05,
      "loss": 1.4836,
      "step": 1820
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9881266490765176e-05,
      "loss": 1.5088,
      "step": 1830
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9771328056288477e-05,
      "loss": 1.5183,
      "step": 1840
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9661389621811786e-05,
      "loss": 1.5508,
      "step": 1850
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9551451187335095e-05,
      "loss": 1.5068,
      "step": 1860
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9441512752858403e-05,
      "loss": 1.512,
      "step": 1870
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9331574318381705e-05,
      "loss": 1.523,
      "step": 1880
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9221635883905014e-05,
      "loss": 1.4935,
      "step": 1890
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9111697449428322e-05,
      "loss": 1.4949,
      "step": 1900
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.900175901495163e-05,
      "loss": 1.5423,
      "step": 1910
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8891820580474936e-05,
      "loss": 1.5473,
      "step": 1920
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.878188214599824e-05,
      "loss": 1.4828,
      "step": 1930
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8671943711521547e-05,
      "loss": 1.5154,
      "step": 1940
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8562005277044855e-05,
      "loss": 1.4707,
      "step": 1950
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8452066842568164e-05,
      "loss": 1.5086,
      "step": 1960
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8342128408091473e-05,
      "loss": 1.5008,
      "step": 1970
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8232189973614774e-05,
      "loss": 1.5568,
      "step": 1980
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8122251539138083e-05,
      "loss": 1.464,
      "step": 1990
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.801231310466139e-05,
      "loss": 1.5346,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 4548,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.263891637642199e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}