|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 25.0,
  "eval_steps": 500,
  "global_step": 14925,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8375209380234506,
      "grad_norm": 0.8667312860488892,
      "learning_rate": 4.581239530988275e-05,
      "loss": 7.6874,
      "step": 500
    },
    {
      "epoch": 1.675041876046901,
      "grad_norm": 1.0515320301055908,
      "learning_rate": 4.16247906197655e-05,
      "loss": 7.1236,
      "step": 1000
    },
    {
      "epoch": 2.5125628140703515,
      "grad_norm": 0.9544970393180847,
      "learning_rate": 3.7437185929648245e-05,
      "loss": 6.9511,
      "step": 1500
    },
    {
      "epoch": 3.3500837520938025,
      "grad_norm": 1.0608201026916504,
      "learning_rate": 3.324958123953099e-05,
      "loss": 6.8374,
      "step": 2000
    },
    {
      "epoch": 4.187604690117253,
      "grad_norm": 1.3822133541107178,
      "learning_rate": 2.906197654941374e-05,
      "loss": 6.7455,
      "step": 2500
    },
    {
      "epoch": 5.025125628140704,
      "grad_norm": 1.3496253490447998,
      "learning_rate": 2.4874371859296484e-05,
      "loss": 6.6841,
      "step": 3000
    },
    {
      "epoch": 5.8626465661641545,
      "grad_norm": 1.0734386444091797,
      "learning_rate": 2.0686767169179232e-05,
      "loss": 6.6407,
      "step": 3500
    },
    {
      "epoch": 6.700167504187605,
      "grad_norm": 1.4356346130371094,
      "learning_rate": 1.6499162479061976e-05,
      "loss": 6.6059,
      "step": 4000
    },
    {
      "epoch": 7.5376884422110555,
      "grad_norm": 1.3260369300842285,
      "learning_rate": 1.2311557788944725e-05,
      "loss": 6.5782,
      "step": 4500
    },
    {
      "epoch": 8.375209380234505,
      "grad_norm": 1.589943528175354,
      "learning_rate": 8.123953098827471e-06,
      "loss": 6.5581,
      "step": 5000
    },
    {
      "epoch": 9.212730318257956,
      "grad_norm": 1.1932653188705444,
      "learning_rate": 3.936348408710218e-06,
      "loss": 6.5496,
      "step": 5500
    },
    {
      "epoch": 10.0,
      "step": 5970,
      "total_flos": 7568723438714880.0,
      "train_loss": 6.792878454773869,
      "train_runtime": 2074.686,
      "train_samples_per_second": 92.024,
      "train_steps_per_second": 2.878
    },
    {
      "epoch": 10.050251256281408,
      "grad_norm": 1.6812041997909546,
      "learning_rate": 4.9874371859296486e-05,
      "loss": 6.5362,
      "step": 6000
    },
    {
      "epoch": 10.887772194304858,
      "grad_norm": 1.7040314674377441,
      "learning_rate": 4.778056951423786e-05,
      "loss": 6.542,
      "step": 6500
    },
    {
      "epoch": 11.725293132328309,
      "grad_norm": 1.5005360841751099,
      "learning_rate": 4.568676716917923e-05,
      "loss": 6.4812,
      "step": 7000
    },
    {
      "epoch": 12.56281407035176,
      "grad_norm": 2.1963107585906982,
      "learning_rate": 4.35929648241206e-05,
      "loss": 6.4312,
      "step": 7500
    },
    {
      "epoch": 13.40033500837521,
      "grad_norm": 1.4487565755844116,
      "learning_rate": 4.1499162479061974e-05,
      "loss": 6.4049,
      "step": 8000
    },
    {
      "epoch": 14.23785594639866,
      "grad_norm": 1.7677375078201294,
      "learning_rate": 3.940536013400335e-05,
      "loss": 6.3664,
      "step": 8500
    },
    {
      "epoch": 15.075376884422111,
      "grad_norm": 1.8846427202224731,
      "learning_rate": 3.731155778894472e-05,
      "loss": 6.3339,
      "step": 9000
    },
    {
      "epoch": 15.912897822445562,
      "grad_norm": 1.711972713470459,
      "learning_rate": 3.5217755443886095e-05,
      "loss": 6.3093,
      "step": 9500
    },
    {
      "epoch": 16.75041876046901,
      "grad_norm": 1.8777658939361572,
      "learning_rate": 3.312395309882747e-05,
      "loss": 6.2826,
      "step": 10000
    },
    {
      "epoch": 17.587939698492463,
      "grad_norm": 1.5985726118087769,
      "learning_rate": 3.103015075376884e-05,
      "loss": 6.2612,
      "step": 10500
    },
    {
      "epoch": 18.42546063651591,
      "grad_norm": 1.8098769187927246,
      "learning_rate": 2.8936348408710216e-05,
      "loss": 6.2504,
      "step": 11000
    },
    {
      "epoch": 19.262981574539364,
      "grad_norm": 1.4644687175750732,
      "learning_rate": 2.684254606365159e-05,
      "loss": 6.2358,
      "step": 11500
    },
    {
      "epoch": 20.0,
      "step": 11940,
      "total_flos": 1.513744687742976e+16,
      "train_loss": 3.172654545207319,
      "train_runtime": 2061.7586,
      "train_samples_per_second": 185.201,
      "train_steps_per_second": 5.791
    },
    {
      "epoch": 20.100502512562816,
      "grad_norm": 1.9149502515792847,
      "learning_rate": 4.9798994974874375e-05,
      "loss": 6.2133,
      "step": 12000
    },
    {
      "epoch": 20.938023450586265,
      "grad_norm": 1.8770983219146729,
      "learning_rate": 4.8123953098827474e-05,
      "loss": 6.222,
      "step": 12500
    },
    {
      "epoch": 21.775544388609717,
      "grad_norm": 1.8764543533325195,
      "learning_rate": 4.6448911222780573e-05,
      "loss": 6.1987,
      "step": 13000
    },
    {
      "epoch": 22.613065326633166,
      "grad_norm": 1.928965449333191,
      "learning_rate": 4.477386934673367e-05,
      "loss": 6.1787,
      "step": 13500
    },
    {
      "epoch": 23.450586264656618,
      "grad_norm": 1.6476266384124756,
      "learning_rate": 4.309882747068677e-05,
      "loss": 6.1524,
      "step": 14000
    },
    {
      "epoch": 24.288107202680067,
      "grad_norm": 1.7504838705062866,
      "learning_rate": 4.142378559463987e-05,
      "loss": 6.1409,
      "step": 14500
    },
    {
      "epoch": 25.0,
      "step": 14925,
      "total_flos": 1.89218085967872e+16,
      "train_loss": 1.2342067862914834,
      "train_runtime": 1036.8459,
      "train_samples_per_second": 460.338,
      "train_steps_per_second": 14.395
    }
  ],
  "logging_steps": 500,
  "max_steps": 14925,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 25,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.89218085967872e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|