{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6596306068601583,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.989006156552331e-05,
      "loss": 1.8193,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.978012313104662e-05,
      "loss": 1.7656,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.967018469656992e-05,
      "loss": 1.7573,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.956024626209323e-05,
      "loss": 1.6758,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9450307827616535e-05,
      "loss": 1.5776,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.934036939313985e-05,
      "loss": 1.615,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9230430958663146e-05,
      "loss": 1.5378,
      "step": 70
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.912049252418646e-05,
      "loss": 1.6188,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.901055408970976e-05,
      "loss": 1.5489,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8900615655233075e-05,
      "loss": 1.5724,
      "step": 100
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.879067722075638e-05,
      "loss": 1.5262,
      "step": 110
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8680738786279686e-05,
      "loss": 1.5224,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.857080035180299e-05,
      "loss": 1.5176,
      "step": 130
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.84608619173263e-05,
      "loss": 1.5263,
      "step": 140
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.835092348284961e-05,
      "loss": 1.5832,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.824098504837291e-05,
      "loss": 1.5403,
      "step": 160
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.813104661389622e-05,
      "loss": 1.5689,
      "step": 170
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.802110817941953e-05,
      "loss": 1.5389,
      "step": 180
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7911169744942836e-05,
      "loss": 1.5593,
      "step": 190
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.780123131046614e-05,
      "loss": 1.52,
      "step": 200
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7691292875989446e-05,
      "loss": 1.6009,
      "step": 210
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.758135444151275e-05,
      "loss": 1.5612,
      "step": 220
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7471416007036063e-05,
      "loss": 1.5532,
      "step": 230
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.736147757255937e-05,
      "loss": 1.5636,
      "step": 240
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7251539138082674e-05,
      "loss": 1.56,
      "step": 250
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.714160070360598e-05,
      "loss": 1.5406,
      "step": 260
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.703166226912929e-05,
      "loss": 1.4851,
      "step": 270
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.6921723834652596e-05,
      "loss": 1.5333,
      "step": 280
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.681178540017591e-05,
      "loss": 1.5684,
      "step": 290
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.670184696569921e-05,
      "loss": 1.5636,
      "step": 300
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.659190853122252e-05,
      "loss": 1.5434,
      "step": 310
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6481970096745824e-05,
      "loss": 1.5002,
      "step": 320
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6372031662269136e-05,
      "loss": 1.5413,
      "step": 330
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6262093227792435e-05,
      "loss": 1.5275,
      "step": 340
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.615215479331575e-05,
      "loss": 1.508,
      "step": 350
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.604221635883905e-05,
      "loss": 1.5033,
      "step": 360
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.593227792436236e-05,
      "loss": 1.5416,
      "step": 370
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.582233948988567e-05,
      "loss": 1.5092,
      "step": 380
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5712401055408974e-05,
      "loss": 1.5605,
      "step": 390
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.560246262093228e-05,
      "loss": 1.5624,
      "step": 400
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5492524186455585e-05,
      "loss": 1.5394,
      "step": 410
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.53825857519789e-05,
      "loss": 1.476,
      "step": 420
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.52726473175022e-05,
      "loss": 1.5367,
      "step": 430
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.516270888302551e-05,
      "loss": 1.5041,
      "step": 440
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.505277044854881e-05,
      "loss": 1.5342,
      "step": 450
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4942832014072125e-05,
      "loss": 1.5068,
      "step": 460
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.483289357959543e-05,
      "loss": 1.4912,
      "step": 470
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4722955145118735e-05,
      "loss": 1.4893,
      "step": 480
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.461301671064204e-05,
      "loss": 1.5338,
      "step": 490
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.450307827616535e-05,
      "loss": 1.5258,
      "step": 500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.439313984168866e-05,
      "loss": 1.5373,
      "step": 510
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.428320140721196e-05,
      "loss": 1.4972,
      "step": 520
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.417326297273527e-05,
      "loss": 1.4868,
      "step": 530
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.406332453825858e-05,
      "loss": 1.5167,
      "step": 540
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3953386103781885e-05,
      "loss": 1.5242,
      "step": 550
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.384344766930519e-05,
      "loss": 1.5309,
      "step": 560
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3733509234828496e-05,
      "loss": 1.5091,
      "step": 570
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.36235708003518e-05,
      "loss": 1.5041,
      "step": 580
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.351363236587511e-05,
      "loss": 1.4934,
      "step": 590
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.340369393139842e-05,
      "loss": 1.5113,
      "step": 600
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.329375549692173e-05,
      "loss": 1.4899,
      "step": 610
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.318381706244503e-05,
      "loss": 1.5019,
      "step": 620
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.307387862796834e-05,
      "loss": 1.5019,
      "step": 630
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.2963940193491646e-05,
      "loss": 1.5331,
      "step": 640
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.285400175901496e-05,
      "loss": 1.5444,
      "step": 650
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2744063324538256e-05,
      "loss": 1.527,
      "step": 660
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.263412489006157e-05,
      "loss": 1.5329,
      "step": 670
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2524186455584874e-05,
      "loss": 1.5572,
      "step": 680
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2414248021108186e-05,
      "loss": 1.53,
      "step": 690
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.230430958663149e-05,
      "loss": 1.4891,
      "step": 700
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2194371152154796e-05,
      "loss": 1.5004,
      "step": 710
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.20844327176781e-05,
      "loss": 1.5134,
      "step": 720
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.1974494283201407e-05,
      "loss": 1.5081,
      "step": 730
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.186455584872472e-05,
      "loss": 1.5006,
      "step": 740
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1754617414248024e-05,
      "loss": 1.515,
      "step": 750
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.164467897977133e-05,
      "loss": 1.4862,
      "step": 760
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1534740545294634e-05,
      "loss": 1.5196,
      "step": 770
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1424802110817946e-05,
      "loss": 1.4908,
      "step": 780
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.131486367634125e-05,
      "loss": 1.5443,
      "step": 790
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.120492524186456e-05,
      "loss": 1.5457,
      "step": 800
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.109498680738786e-05,
      "loss": 1.5616,
      "step": 810
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0985048372911174e-05,
      "loss": 1.5537,
      "step": 820
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.087510993843448e-05,
      "loss": 1.5206,
      "step": 830
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0765171503957784e-05,
      "loss": 1.4923,
      "step": 840
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.065523306948109e-05,
      "loss": 1.4721,
      "step": 850
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.05452946350044e-05,
      "loss": 1.5357,
      "step": 860
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.043535620052771e-05,
      "loss": 1.5163,
      "step": 870
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.032541776605102e-05,
      "loss": 1.5406,
      "step": 880
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.021547933157432e-05,
      "loss": 1.5151,
      "step": 890
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.010554089709763e-05,
      "loss": 1.5482,
      "step": 900
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9995602462620935e-05,
      "loss": 1.487,
      "step": 910
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.988566402814424e-05,
      "loss": 1.5559,
      "step": 920
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9775725593667545e-05,
      "loss": 1.5085,
      "step": 930
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.966578715919085e-05,
      "loss": 1.5091,
      "step": 940
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.955584872471416e-05,
      "loss": 1.512,
      "step": 950
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.944591029023747e-05,
      "loss": 1.4824,
      "step": 960
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.933597185576078e-05,
      "loss": 1.4935,
      "step": 970
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.922603342128408e-05,
      "loss": 1.5034,
      "step": 980
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.911609498680739e-05,
      "loss": 1.5449,
      "step": 990
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.9006156552330695e-05,
      "loss": 1.481,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 4548,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 6.3202482192384e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}