{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.5259612685938815,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 8e-08,
      "loss": 11.8368,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.6e-07,
      "loss": 11.103,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.4e-07,
      "loss": 11.6842,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.2e-07,
      "loss": 11.7279,
      "step": 40
    },
    {
      "epoch": 0.06,
      "learning_rate": 4e-07,
      "loss": 11.6439,
      "step": 50
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8e-07,
      "loss": 11.1668,
      "step": 60
    },
    {
      "epoch": 0.09,
      "learning_rate": 5.6e-07,
      "loss": 11.5542,
      "step": 70
    },
    {
      "epoch": 0.1,
      "learning_rate": 6.4e-07,
      "loss": 11.2201,
      "step": 80
    },
    {
      "epoch": 0.11,
      "learning_rate": 7.2e-07,
      "loss": 10.0118,
      "step": 90
    },
    {
      "epoch": 0.13,
      "learning_rate": 8e-07,
      "loss": 9.2907,
      "step": 100
    },
    {
      "epoch": 0.14,
      "learning_rate": 8.799999999999999e-07,
      "loss": 8.9216,
      "step": 110
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.6e-07,
      "loss": 8.9268,
      "step": 120
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.04e-06,
      "loss": 8.6545,
      "step": 130
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.12e-06,
      "loss": 8.7888,
      "step": 140
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.2e-06,
      "loss": 9.0201,
      "step": 150
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.28e-06,
      "loss": 8.0512,
      "step": 160
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.3600000000000001e-06,
      "loss": 5.9266,
      "step": 170
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.44e-06,
      "loss": 5.3527,
      "step": 180
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.5199999999999998e-06,
      "loss": 4.9847,
      "step": 190
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.6e-06,
      "loss": 4.8117,
      "step": 200
    },
    {
      "epoch": 0.25,
      "eval_webgpt_accuracy": 0.3513391153050626,
      "eval_webgpt_loss": 4.794132709503174,
      "eval_webgpt_runtime": 39.5471,
      "eval_webgpt_samples_per_second": 90.095,
      "eval_webgpt_steps_per_second": 22.53,
      "step": 200
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.6799999999999998e-06,
      "loss": 4.7854,
      "step": 210
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.7599999999999999e-06,
      "loss": 4.5555,
      "step": 220
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.84e-06,
      "loss": 4.4819,
      "step": 230
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.92e-06,
      "loss": 4.3504,
      "step": 240
    },
    {
      "epoch": 0.32,
      "learning_rate": 2e-06,
      "loss": 4.1471,
      "step": 250
    },
    {
      "epoch": 0.33,
      "learning_rate": 2.08e-06,
      "loss": 4.4132,
      "step": 260
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.16e-06,
      "loss": 4.1309,
      "step": 270
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.24e-06,
      "loss": 4.4153,
      "step": 280
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.32e-06,
      "loss": 4.3256,
      "step": 290
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.4e-06,
      "loss": 4.3798,
      "step": 300
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.48e-06,
      "loss": 4.3665,
      "step": 310
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.56e-06,
      "loss": 4.2545,
      "step": 320
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.64e-06,
      "loss": 4.1912,
      "step": 330
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.7200000000000002e-06,
      "loss": 4.2359,
      "step": 340
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.8e-06,
      "loss": 4.2145,
      "step": 350
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.88e-06,
      "loss": 4.0716,
      "step": 360
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.96e-06,
      "loss": 4.0429,
      "step": 370
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.0399999999999997e-06,
      "loss": 4.1404,
      "step": 380
    },
    {
      "epoch": 0.49,
      "learning_rate": 3.1199999999999998e-06,
      "loss": 3.9878,
      "step": 390
    },
    {
      "epoch": 0.51,
      "learning_rate": 3.2e-06,
      "loss": 4.1169,
      "step": 400
    },
    {
      "epoch": 0.51,
      "eval_webgpt_accuracy": 0.3585799846229516,
      "eval_webgpt_loss": 3.982567310333252,
      "eval_webgpt_runtime": 39.5324,
      "eval_webgpt_samples_per_second": 90.129,
      "eval_webgpt_steps_per_second": 22.538,
      "step": 400
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.2799999999999995e-06,
      "loss": 3.9303,
      "step": 410
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.3599999999999996e-06,
      "loss": 4.0221,
      "step": 420
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.4399999999999997e-06,
      "loss": 3.8958,
      "step": 430
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.5199999999999998e-06,
      "loss": 3.8434,
      "step": 440
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.6e-06,
      "loss": 4.0083,
      "step": 450
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.68e-06,
      "loss": 3.9172,
      "step": 460
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.7599999999999996e-06,
      "loss": 4.0955,
      "step": 470
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.84e-06,
      "loss": 4.1085,
      "step": 480
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.92e-06,
      "loss": 4.149,
      "step": 490
    },
    {
      "epoch": 0.63,
      "learning_rate": 4e-06,
      "loss": 4.0345,
      "step": 500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.08e-06,
      "loss": 3.9929,
      "step": 510
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.16e-06,
      "loss": 3.7969,
      "step": 520
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.24e-06,
      "loss": 3.6102,
      "step": 530
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.32e-06,
      "loss": 3.744,
      "step": 540
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.4e-06,
      "loss": 3.8242,
      "step": 550
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.48e-06,
      "loss": 3.8092,
      "step": 560
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.5599999999999995e-06,
      "loss": 3.8835,
      "step": 570
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.64e-06,
      "loss": 3.7335,
      "step": 580
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.72e-06,
      "loss": 3.8462,
      "step": 590
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.8e-06,
      "loss": 3.7422,
      "step": 600
    },
    {
      "epoch": 0.76,
      "eval_webgpt_accuracy": 0.36334147099294245,
      "eval_webgpt_loss": 3.8047080039978027,
      "eval_webgpt_runtime": 39.6534,
      "eval_webgpt_samples_per_second": 89.854,
      "eval_webgpt_steps_per_second": 22.47,
      "step": 600
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.88e-06,
      "loss": 3.7068,
      "step": 610
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.96e-06,
      "loss": 3.69,
      "step": 620
    },
    {
      "epoch": 0.8,
      "learning_rate": 5.04e-06,
      "loss": 3.65,
      "step": 630
    },
    {
      "epoch": 0.81,
      "learning_rate": 5.12e-06,
      "loss": 3.6972,
      "step": 640
    },
    {
      "epoch": 0.82,
      "learning_rate": 5.2e-06,
      "loss": 3.7907,
      "step": 650
    },
    {
      "epoch": 0.83,
      "learning_rate": 5.28e-06,
      "loss": 3.7505,
      "step": 660
    },
    {
      "epoch": 0.85,
      "learning_rate": 5.36e-06,
      "loss": 3.8109,
      "step": 670
    },
    {
      "epoch": 0.86,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 3.8612,
      "step": 680
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.52e-06,
      "loss": 3.7077,
      "step": 690
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.6e-06,
      "loss": 3.8546,
      "step": 700
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.68e-06,
      "loss": 3.8801,
      "step": 710
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.76e-06,
      "loss": 3.7939,
      "step": 720
    },
    {
      "epoch": 0.92,
      "learning_rate": 5.84e-06,
      "loss": 3.6499,
      "step": 730
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.92e-06,
      "loss": 4.0742,
      "step": 740
    },
    {
      "epoch": 0.95,
      "learning_rate": 6e-06,
      "loss": 3.8072,
      "step": 750
    },
    {
      "epoch": 0.96,
      "learning_rate": 6.079999999999999e-06,
      "loss": 3.7959,
      "step": 760
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.1599999999999995e-06,
      "loss": 3.7556,
      "step": 770
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.2399999999999995e-06,
      "loss": 3.8263,
      "step": 780
    },
    {
      "epoch": 1.0,
      "learning_rate": 6.32e-06,
      "loss": 3.9162,
      "step": 790
    },
    {
      "epoch": 1.01,
      "learning_rate": 6.4e-06,
      "loss": 3.7188,
      "step": 800
    },
    {
      "epoch": 1.01,
      "eval_webgpt_accuracy": 0.3654184548784667,
      "eval_webgpt_loss": 3.7724239826202393,
      "eval_webgpt_runtime": 39.717,
      "eval_webgpt_samples_per_second": 89.71,
      "eval_webgpt_steps_per_second": 22.434,
      "step": 800
    },
    {
      "epoch": 1.02,
      "learning_rate": 6.48e-06,
      "loss": 3.7938,
      "step": 810
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.559999999999999e-06,
      "loss": 3.6401,
      "step": 820
    },
    {
      "epoch": 1.05,
      "learning_rate": 6.639999999999999e-06,
      "loss": 3.7234,
      "step": 830
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.719999999999999e-06,
      "loss": 3.6234,
      "step": 840
    },
    {
      "epoch": 1.07,
      "learning_rate": 6.799999999999999e-06,
      "loss": 3.7614,
      "step": 850
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.879999999999999e-06,
      "loss": 3.7001,
      "step": 860
    },
    {
      "epoch": 1.1,
      "learning_rate": 6.9599999999999994e-06,
      "loss": 3.8073,
      "step": 870
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.0399999999999995e-06,
      "loss": 3.6328,
      "step": 880
    },
    {
      "epoch": 1.12,
      "learning_rate": 7.12e-06,
      "loss": 3.9974,
      "step": 890
    },
    {
      "epoch": 1.14,
      "learning_rate": 7.2e-06,
      "loss": 3.6881,
      "step": 900
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.28e-06,
      "loss": 3.7669,
      "step": 910
    },
    {
      "epoch": 1.16,
      "learning_rate": 7.36e-06,
      "loss": 3.5966,
      "step": 920
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.44e-06,
      "loss": 3.748,
      "step": 930
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.519999999999999e-06,
      "loss": 3.7903,
      "step": 940
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.599999999999999e-06,
      "loss": 3.7352,
      "step": 950
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.68e-06,
      "loss": 3.5784,
      "step": 960
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.76e-06,
      "loss": 3.5768,
      "step": 970
    },
    {
      "epoch": 1.24,
      "learning_rate": 7.84e-06,
      "loss": 3.7704,
      "step": 980
    },
    {
      "epoch": 1.25,
      "learning_rate": 7.92e-06,
      "loss": 3.6493,
      "step": 990
    },
    {
      "epoch": 1.26,
      "learning_rate": 8e-06,
      "loss": 3.6712,
      "step": 1000
    },
    {
      "epoch": 1.26,
      "eval_webgpt_accuracy": 0.36571671085272284,
      "eval_webgpt_loss": 3.766130208969116,
      "eval_webgpt_runtime": 39.6543,
      "eval_webgpt_samples_per_second": 89.852,
      "eval_webgpt_steps_per_second": 22.469,
      "step": 1000
    },
    {
      "epoch": 1.28,
      "learning_rate": 7.941733430444282e-06,
      "loss": 3.8326,
      "step": 1010
    },
    {
      "epoch": 1.29,
      "learning_rate": 7.883466860888565e-06,
      "loss": 3.6731,
      "step": 1020
    },
    {
      "epoch": 1.3,
      "learning_rate": 7.825200291332846e-06,
      "loss": 3.6476,
      "step": 1030
    },
    {
      "epoch": 1.31,
      "learning_rate": 7.76693372177713e-06,
      "loss": 3.7165,
      "step": 1040
    },
    {
      "epoch": 1.33,
      "learning_rate": 7.708667152221412e-06,
      "loss": 3.6997,
      "step": 1050
    },
    {
      "epoch": 1.34,
      "learning_rate": 7.650400582665694e-06,
      "loss": 3.7811,
      "step": 1060
    },
    {
      "epoch": 1.35,
      "learning_rate": 7.592134013109977e-06,
      "loss": 3.6025,
      "step": 1070
    },
    {
      "epoch": 1.36,
      "learning_rate": 7.533867443554261e-06,
      "loss": 3.8738,
      "step": 1080
    },
    {
      "epoch": 1.38,
      "learning_rate": 7.475600873998543e-06,
      "loss": 3.7493,
      "step": 1090
    },
    {
      "epoch": 1.39,
      "learning_rate": 7.4173343044428255e-06,
      "loss": 3.8542,
      "step": 1100
    },
    {
      "epoch": 1.4,
      "learning_rate": 7.359067734887107e-06,
      "loss": 3.4613,
      "step": 1110
    },
    {
      "epoch": 1.41,
      "learning_rate": 7.300801165331391e-06,
      "loss": 3.7582,
      "step": 1120
    },
    {
      "epoch": 1.43,
      "learning_rate": 7.242534595775673e-06,
      "loss": 3.6542,
      "step": 1130
    },
    {
      "epoch": 1.44,
      "learning_rate": 7.184268026219956e-06,
      "loss": 3.6867,
      "step": 1140
    },
    {
      "epoch": 1.45,
      "learning_rate": 7.126001456664238e-06,
      "loss": 3.6046,
      "step": 1150
    },
    {
      "epoch": 1.47,
      "learning_rate": 7.067734887108521e-06,
      "loss": 3.7645,
      "step": 1160
    },
    {
      "epoch": 1.48,
      "learning_rate": 7.009468317552803e-06,
      "loss": 3.8135,
      "step": 1170
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.951201747997087e-06,
      "loss": 3.8123,
      "step": 1180
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.8929351784413686e-06,
      "loss": 3.743,
      "step": 1190
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.834668608885651e-06,
      "loss": 3.8658,
      "step": 1200
    },
    {
      "epoch": 1.52,
      "eval_webgpt_accuracy": 0.36873122296639843,
      "eval_webgpt_loss": 3.7487637996673584,
      "eval_webgpt_runtime": 39.5934,
      "eval_webgpt_samples_per_second": 89.99,
      "eval_webgpt_steps_per_second": 22.504,
      "step": 1200
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.776402039329934e-06,
      "loss": 3.7027,
      "step": 1210
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.718135469774217e-06,
      "loss": 3.6325,
      "step": 1220
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.659868900218499e-06,
      "loss": 3.7518,
      "step": 1230
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.601602330662782e-06,
      "loss": 3.5668,
      "step": 1240
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.543335761107064e-06,
      "loss": 3.7264,
      "step": 1250
    },
    {
      "epoch": 1.59,
      "learning_rate": 6.485069191551347e-06,
      "loss": 3.5763,
      "step": 1260
    },
    {
      "epoch": 1.6,
      "learning_rate": 6.426802621995629e-06,
      "loss": 3.7075,
      "step": 1270
    },
    {
      "epoch": 1.62,
      "learning_rate": 6.3685360524399125e-06,
      "loss": 3.6597,
      "step": 1280
    },
    {
      "epoch": 1.63,
      "learning_rate": 6.310269482884194e-06,
      "loss": 3.8605,
      "step": 1290
    },
    {
      "epoch": 1.64,
      "learning_rate": 6.252002913328477e-06,
      "loss": 3.7568,
      "step": 1300
    },
    {
      "epoch": 1.65,
      "learning_rate": 6.19373634377276e-06,
      "loss": 3.6982,
      "step": 1310
    },
    {
      "epoch": 1.67,
      "learning_rate": 6.135469774217043e-06,
      "loss": 3.8453,
      "step": 1320
    },
    {
      "epoch": 1.68,
      "learning_rate": 6.0772032046613246e-06,
      "loss": 3.8372,
      "step": 1330
    },
    {
      "epoch": 1.69,
      "learning_rate": 6.018936635105608e-06,
      "loss": 3.5192,
      "step": 1340
    },
    {
      "epoch": 1.71,
      "learning_rate": 5.96067006554989e-06,
      "loss": 3.7037,
      "step": 1350
    },
    {
      "epoch": 1.72,
      "learning_rate": 5.902403495994173e-06,
      "loss": 3.4107,
      "step": 1360
    },
    {
      "epoch": 1.73,
      "learning_rate": 5.844136926438456e-06,
      "loss": 3.5911,
      "step": 1370
    },
    {
      "epoch": 1.74,
      "learning_rate": 5.785870356882738e-06,
      "loss": 3.7885,
      "step": 1380
    },
    {
      "epoch": 1.76,
      "learning_rate": 5.72760378732702e-06,
      "loss": 3.5698,
      "step": 1390
    },
    {
      "epoch": 1.77,
      "learning_rate": 5.669337217771304e-06,
      "loss": 3.6686,
      "step": 1400
    },
    {
      "epoch": 1.77,
      "eval_webgpt_accuracy": 0.3689563590269814,
      "eval_webgpt_loss": 3.7346436977386475,
      "eval_webgpt_runtime": 39.5649,
      "eval_webgpt_samples_per_second": 90.055,
      "eval_webgpt_steps_per_second": 22.52,
      "step": 1400
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.611070648215586e-06,
      "loss": 3.7345,
      "step": 1410
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.5528040786598685e-06,
      "loss": 3.8317,
      "step": 1420
    },
    {
      "epoch": 1.81,
      "learning_rate": 5.49453750910415e-06,
      "loss": 3.7166,
      "step": 1430
    },
    {
      "epoch": 1.82,
      "learning_rate": 5.436270939548434e-06,
      "loss": 3.7229,
      "step": 1440
    },
    {
      "epoch": 1.83,
      "learning_rate": 5.378004369992716e-06,
      "loss": 3.4168,
      "step": 1450
    },
    {
      "epoch": 1.84,
      "learning_rate": 5.319737800436999e-06,
      "loss": 3.9726,
      "step": 1460
    },
    {
      "epoch": 1.86,
      "learning_rate": 5.2614712308812814e-06,
      "loss": 3.8361,
      "step": 1470
    },
    {
      "epoch": 1.87,
      "learning_rate": 5.203204661325564e-06,
      "loss": 3.5802,
      "step": 1480
    },
    {
      "epoch": 1.88,
      "learning_rate": 5.144938091769846e-06,
      "loss": 3.9139,
      "step": 1490
    },
    {
      "epoch": 1.89,
      "learning_rate": 5.08667152221413e-06,
      "loss": 3.5712,
      "step": 1500
    },
    {
      "epoch": 1.91,
      "learning_rate": 5.028404952658412e-06,
      "loss": 3.6163,
      "step": 1510
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.970138383102694e-06,
      "loss": 3.5838,
      "step": 1520
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.911871813546977e-06,
      "loss": 3.8421,
      "step": 1530
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.85360524399126e-06,
      "loss": 3.6685,
      "step": 1540
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.795338674435542e-06,
      "loss": 3.6735,
      "step": 1550
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.737072104879825e-06,
      "loss": 3.7081,
      "step": 1560
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.678805535324107e-06,
      "loss": 3.7911,
      "step": 1570
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.62053896576839e-06,
      "loss": 3.7296,
      "step": 1580
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.562272396212672e-06,
      "loss": 3.6247,
      "step": 1590
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.5040058266569555e-06,
      "loss": 3.5536,
      "step": 1600
    },
    {
      "epoch": 2.02,
      "eval_webgpt_accuracy": 0.3708860692093479,
      "eval_webgpt_loss": 3.709535837173462,
      "eval_webgpt_runtime": 39.7168,
      "eval_webgpt_samples_per_second": 89.71,
      "eval_webgpt_steps_per_second": 22.434,
      "step": 1600
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.4457392571012374e-06,
      "loss": 3.3749,
      "step": 1610
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.38747268754552e-06,
      "loss": 3.6204,
      "step": 1620
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.329206117989803e-06,
      "loss": 3.6534,
      "step": 1630
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.270939548434086e-06,
      "loss": 3.769,
      "step": 1640
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.212672978878368e-06,
      "loss": 3.4946,
      "step": 1650
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.154406409322651e-06,
      "loss": 3.4767,
      "step": 1660
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.096139839766933e-06,
      "loss": 4.0031,
      "step": 1670
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.037873270211216e-06,
      "loss": 3.5801,
      "step": 1680
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.979606700655499e-06,
      "loss": 3.7839,
      "step": 1690
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.921340131099781e-06,
      "loss": 3.389,
      "step": 1700
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.863073561544064e-06,
      "loss": 3.5539,
      "step": 1710
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.8048069919883465e-06,
      "loss": 3.7058,
      "step": 1720
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.746540422432629e-06,
      "loss": 3.6334,
      "step": 1730
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.6882738528769115e-06,
      "loss": 3.5749,
      "step": 1740
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.6300072833211943e-06,
      "loss": 3.5804,
      "step": 1750
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.571740713765477e-06,
      "loss": 3.5608,
      "step": 1760
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.5134741442097594e-06,
      "loss": 3.8894,
      "step": 1770
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.455207574654042e-06,
      "loss": 3.4811,
      "step": 1780
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.396941005098325e-06,
      "loss": 3.5773,
      "step": 1790
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.338674435542607e-06,
      "loss": 3.4083,
      "step": 1800
    },
    {
      "epoch": 2.27,
      "eval_webgpt_accuracy": 0.37110112351569674,
      "eval_webgpt_loss": 3.7004425525665283,
      "eval_webgpt_runtime": 39.7103,
      "eval_webgpt_samples_per_second": 89.725,
      "eval_webgpt_steps_per_second": 22.438,
      "step": 1800
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.28040786598689e-06,
      "loss": 3.5236,
      "step": 1810
    },
    {
      "epoch": 2.3,
      "learning_rate": 3.2221412964311723e-06,
      "loss": 3.6029,
      "step": 1820
    },
    {
      "epoch": 2.31,
      "learning_rate": 3.163874726875455e-06,
      "loss": 3.6437,
      "step": 1830
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.105608157319738e-06,
      "loss": 3.5334,
      "step": 1840
    },
    {
      "epoch": 2.34,
      "learning_rate": 3.04734158776402e-06,
      "loss": 3.5593,
      "step": 1850
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.989075018208303e-06,
      "loss": 3.8394,
      "step": 1860
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.9308084486525856e-06,
      "loss": 3.5156,
      "step": 1870
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.872541879096868e-06,
      "loss": 3.5369,
      "step": 1880
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.8142753095411507e-06,
      "loss": 3.5477,
      "step": 1890
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.7560087399854335e-06,
      "loss": 3.6827,
      "step": 1900
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.697742170429716e-06,
      "loss": 3.5929,
      "step": 1910
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.6394756008739986e-06,
      "loss": 3.7087,
      "step": 1920
    },
    {
      "epoch": 2.44,
      "learning_rate": 2.581209031318281e-06,
      "loss": 3.4699,
      "step": 1930
    },
    {
      "epoch": 2.45,
      "learning_rate": 2.5229424617625636e-06,
      "loss": 3.5238,
      "step": 1940
    },
    {
      "epoch": 2.46,
      "learning_rate": 2.4646758922068464e-06,
      "loss": 3.6169,
      "step": 1950
    },
    {
      "epoch": 2.48,
      "learning_rate": 2.4064093226511287e-06,
      "loss": 3.536,
      "step": 1960
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.3481427530954115e-06,
      "loss": 3.4735,
      "step": 1970
    },
    {
      "epoch": 2.5,
      "learning_rate": 2.2898761835396942e-06,
      "loss": 3.616,
      "step": 1980
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.2316096139839766e-06,
      "loss": 3.4397,
      "step": 1990
    },
    {
      "epoch": 2.53,
      "learning_rate": 2.1733430444282593e-06,
      "loss": 3.6732,
      "step": 2000
    },
    {
      "epoch": 2.53,
      "eval_webgpt_accuracy": 0.37167770768266506,
      "eval_webgpt_loss": 3.6911776065826416,
      "eval_webgpt_runtime": 39.7365,
      "eval_webgpt_samples_per_second": 89.666,
      "eval_webgpt_steps_per_second": 22.423,
      "step": 2000
    }
  ],
  "max_steps": 2373,
  "num_train_epochs": 3,
  "total_flos": 2456896174817280.0,
  "trial_name": null,
  "trial_params": null
}