|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 69.23076923076923,
  "eval_steps": 500,
  "global_step": 4500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 134.7136688232422,
      "learning_rate": 9.981600000000001e-06,
      "loss": 5.6198,
      "step": 100
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 115.98611450195312,
      "learning_rate": 9.961600000000001e-06,
      "loss": 3.5959,
      "step": 200
    },
    {
      "epoch": 4.615384615384615,
      "grad_norm": 114.23514556884766,
      "learning_rate": 9.941600000000002e-06,
      "loss": 3.3845,
      "step": 300
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 140.5648193359375,
      "learning_rate": 9.921600000000002e-06,
      "loss": 3.1494,
      "step": 400
    },
    {
      "epoch": 7.6923076923076925,
      "grad_norm": 117.4485855102539,
      "learning_rate": 9.901600000000002e-06,
      "loss": 3.0773,
      "step": 500
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 389.71490478515625,
      "learning_rate": 9.8816e-06,
      "loss": 3.2116,
      "step": 600
    },
    {
      "epoch": 10.76923076923077,
      "grad_norm": 107.11251831054688,
      "learning_rate": 9.8616e-06,
      "loss": 3.0471,
      "step": 700
    },
    {
      "epoch": 12.307692307692308,
      "grad_norm": 85.49571228027344,
      "learning_rate": 9.8416e-06,
      "loss": 3.0201,
      "step": 800
    },
    {
      "epoch": 13.846153846153847,
      "grad_norm": 121.62274932861328,
      "learning_rate": 9.821600000000001e-06,
      "loss": 2.9355,
      "step": 900
    },
    {
      "epoch": 15.384615384615385,
      "grad_norm": 64.66451263427734,
      "learning_rate": 9.801600000000001e-06,
      "loss": 2.9637,
      "step": 1000
    },
    {
      "epoch": 16.923076923076923,
      "grad_norm": 182.4657440185547,
      "learning_rate": 9.781600000000001e-06,
      "loss": 2.9819,
      "step": 1100
    },
    {
      "epoch": 18.46153846153846,
      "grad_norm": 97.80529022216797,
      "learning_rate": 9.761600000000002e-06,
      "loss": 2.9486,
      "step": 1200
    },
    {
      "epoch": 20.0,
      "grad_norm": 220.0562744140625,
      "learning_rate": 9.741600000000002e-06,
      "loss": 2.8608,
      "step": 1300
    },
    {
      "epoch": 21.53846153846154,
      "grad_norm": 95.53397369384766,
      "learning_rate": 9.7216e-06,
      "loss": 2.8322,
      "step": 1400
    },
    {
      "epoch": 23.076923076923077,
      "grad_norm": 67.54853057861328,
      "learning_rate": 9.7016e-06,
      "loss": 2.9429,
      "step": 1500
    },
    {
      "epoch": 24.615384615384617,
      "grad_norm": 214.53131103515625,
      "learning_rate": 9.6816e-06,
      "loss": 2.7927,
      "step": 1600
    },
    {
      "epoch": 26.153846153846153,
      "grad_norm": 293.3318786621094,
      "learning_rate": 9.6616e-06,
      "loss": 2.7665,
      "step": 1700
    },
    {
      "epoch": 27.692307692307693,
      "grad_norm": 216.3682861328125,
      "learning_rate": 9.641600000000001e-06,
      "loss": 2.8309,
      "step": 1800
    },
    {
      "epoch": 29.23076923076923,
      "grad_norm": 168.0605010986328,
      "learning_rate": 9.621600000000001e-06,
      "loss": 2.8433,
      "step": 1900
    },
    {
      "epoch": 30.76923076923077,
      "grad_norm": 103.49143981933594,
      "learning_rate": 9.601600000000001e-06,
      "loss": 2.7606,
      "step": 2000
    },
    {
      "epoch": 32.30769230769231,
      "grad_norm": 116.6761474609375,
      "learning_rate": 9.581600000000002e-06,
      "loss": 2.6595,
      "step": 2100
    },
    {
      "epoch": 33.84615384615385,
      "grad_norm": 176.0087432861328,
      "learning_rate": 9.5616e-06,
      "loss": 2.632,
      "step": 2200
    },
    {
      "epoch": 35.38461538461539,
      "grad_norm": 120.39679718017578,
      "learning_rate": 9.5416e-06,
      "loss": 2.5941,
      "step": 2300
    },
    {
      "epoch": 36.92307692307692,
      "grad_norm": 100.69256591796875,
      "learning_rate": 9.5216e-06,
      "loss": 2.6007,
      "step": 2400
    },
    {
      "epoch": 38.46153846153846,
      "grad_norm": 254.54440307617188,
      "learning_rate": 9.5016e-06,
      "loss": 2.5548,
      "step": 2500
    },
    {
      "epoch": 40.0,
      "grad_norm": 113.618896484375,
      "learning_rate": 9.4816e-06,
      "loss": 2.5157,
      "step": 2600
    },
    {
      "epoch": 41.53846153846154,
      "grad_norm": 825.5616455078125,
      "learning_rate": 9.461600000000001e-06,
      "loss": 2.4832,
      "step": 2700
    },
    {
      "epoch": 43.07692307692308,
      "grad_norm": 58.83940505981445,
      "learning_rate": 9.441600000000001e-06,
      "loss": 2.4393,
      "step": 2800
    },
    {
      "epoch": 44.61538461538461,
      "grad_norm": 89.66182708740234,
      "learning_rate": 9.421600000000001e-06,
      "loss": 2.4381,
      "step": 2900
    },
    {
      "epoch": 46.15384615384615,
      "grad_norm": 242.81410217285156,
      "learning_rate": 9.4016e-06,
      "loss": 2.3978,
      "step": 3000
    },
    {
      "epoch": 47.69230769230769,
      "grad_norm": 70.43364715576172,
      "learning_rate": 9.3816e-06,
      "loss": 2.3782,
      "step": 3100
    },
    {
      "epoch": 49.23076923076923,
      "grad_norm": 244.31356811523438,
      "learning_rate": 9.3616e-06,
      "loss": 2.3319,
      "step": 3200
    },
    {
      "epoch": 50.76923076923077,
      "grad_norm": 59.848697662353516,
      "learning_rate": 9.3416e-06,
      "loss": 2.3205,
      "step": 3300
    },
    {
      "epoch": 52.30769230769231,
      "grad_norm": 109.26374053955078,
      "learning_rate": 9.3216e-06,
      "loss": 2.2943,
      "step": 3400
    },
    {
      "epoch": 53.84615384615385,
      "grad_norm": 307.2849426269531,
      "learning_rate": 9.301600000000001e-06,
      "loss": 2.2913,
      "step": 3500
    },
    {
      "epoch": 55.38461538461539,
      "grad_norm": 69.93729400634766,
      "learning_rate": 9.281600000000001e-06,
      "loss": 2.2166,
      "step": 3600
    },
    {
      "epoch": 56.92307692307692,
      "grad_norm": 83.3631591796875,
      "learning_rate": 9.261600000000001e-06,
      "loss": 2.2718,
      "step": 3700
    },
    {
      "epoch": 58.46153846153846,
      "grad_norm": 48.52922821044922,
      "learning_rate": 9.2416e-06,
      "loss": 2.2645,
      "step": 3800
    },
    {
      "epoch": 60.0,
      "grad_norm": 51.951324462890625,
      "learning_rate": 9.2218e-06,
      "loss": 2.2416,
      "step": 3900
    },
    {
      "epoch": 61.53846153846154,
      "grad_norm": 254.99429321289062,
      "learning_rate": 9.2018e-06,
      "loss": 2.2104,
      "step": 4000
    },
    {
      "epoch": 63.07692307692308,
      "grad_norm": 109.68157958984375,
      "learning_rate": 9.1818e-06,
      "loss": 2.1574,
      "step": 4100
    },
    {
      "epoch": 64.61538461538461,
      "grad_norm": 85.5855941772461,
      "learning_rate": 9.161800000000001e-06,
      "loss": 2.1513,
      "step": 4200
    },
    {
      "epoch": 66.15384615384616,
      "grad_norm": 63.89781951904297,
      "learning_rate": 9.141800000000001e-06,
      "loss": 2.1331,
      "step": 4300
    },
    {
      "epoch": 67.6923076923077,
      "grad_norm": 184.29612731933594,
      "learning_rate": 9.121800000000001e-06,
      "loss": 2.1866,
      "step": 4400
    },
    {
      "epoch": 69.23076923076923,
      "grad_norm": 108.59600830078125,
      "learning_rate": 9.1018e-06,
      "loss": 2.1453,
      "step": 4500
    }
  ],
  "logging_steps": 100,
  "max_steps": 50000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 770,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.720094247936e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|