{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 27270,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.18335166850018336,
      "grad_norm": 2.6387195587158203,
      "learning_rate": 4.908324165749908e-05,
      "loss": 2.1955,
      "step": 500
    },
    {
      "epoch": 0.3667033370003667,
      "grad_norm": 2.266592025756836,
      "learning_rate": 4.816648331499817e-05,
      "loss": 2.0932,
      "step": 1000
    },
    {
      "epoch": 0.5500550055005501,
      "grad_norm": 3.0838563442230225,
      "learning_rate": 4.724972497249725e-05,
      "loss": 2.048,
      "step": 1500
    },
    {
      "epoch": 0.7334066740007334,
      "grad_norm": 2.5882773399353027,
      "learning_rate": 4.633296662999633e-05,
      "loss": 2.0419,
      "step": 2000
    },
    {
      "epoch": 0.9167583425009168,
      "grad_norm": 2.441417694091797,
      "learning_rate": 4.541620828749542e-05,
      "loss": 2.0299,
      "step": 2500
    },
    {
      "epoch": 1.1001100110011002,
      "grad_norm": 2.2205169200897217,
      "learning_rate": 4.449944994499451e-05,
      "loss": 1.9854,
      "step": 3000
    },
    {
      "epoch": 1.2834616795012834,
      "grad_norm": 2.802492380142212,
      "learning_rate": 4.358269160249359e-05,
      "loss": 1.9621,
      "step": 3500
    },
    {
      "epoch": 1.466813348001467,
      "grad_norm": 6.791469573974609,
      "learning_rate": 4.266593325999267e-05,
      "loss": 1.957,
      "step": 4000
    },
    {
      "epoch": 1.6501650165016502,
      "grad_norm": 2.5560526847839355,
      "learning_rate": 4.174917491749175e-05,
      "loss": 1.9447,
      "step": 4500
    },
    {
      "epoch": 1.8335166850018334,
      "grad_norm": 2.623948812484741,
      "learning_rate": 4.0832416574990836e-05,
      "loss": 1.9517,
      "step": 5000
    },
    {
      "epoch": 2.0168683535020167,
      "grad_norm": 2.395932674407959,
      "learning_rate": 3.991565823248992e-05,
      "loss": 1.9559,
      "step": 5500
    },
    {
      "epoch": 2.2002200220022003,
      "grad_norm": 2.153954267501831,
      "learning_rate": 3.8998899889989e-05,
      "loss": 1.9099,
      "step": 6000
    },
    {
      "epoch": 2.3835716905023836,
      "grad_norm": 2.0535569190979004,
      "learning_rate": 3.808214154748808e-05,
      "loss": 1.8994,
      "step": 6500
    },
    {
      "epoch": 2.566923359002567,
      "grad_norm": 2.264138698577881,
      "learning_rate": 3.716538320498717e-05,
      "loss": 1.9069,
      "step": 7000
    },
    {
      "epoch": 2.7502750275027505,
      "grad_norm": 1.9639852046966553,
      "learning_rate": 3.624862486248625e-05,
      "loss": 1.9109,
      "step": 7500
    },
    {
      "epoch": 2.933626696002934,
      "grad_norm": 1.975576639175415,
      "learning_rate": 3.5331866519985334e-05,
      "loss": 1.9207,
      "step": 8000
    },
    {
      "epoch": 3.116978364503117,
      "grad_norm": 1.8623844385147095,
      "learning_rate": 3.4415108177484414e-05,
      "loss": 1.8771,
      "step": 8500
    },
    {
      "epoch": 3.3003300330033003,
      "grad_norm": 2.512981653213501,
      "learning_rate": 3.34983498349835e-05,
      "loss": 1.8801,
      "step": 9000
    },
    {
      "epoch": 3.4836817015034836,
      "grad_norm": 2.324370861053467,
      "learning_rate": 3.258159149248258e-05,
      "loss": 1.8746,
      "step": 9500
    },
    {
      "epoch": 3.667033370003667,
      "grad_norm": 2.3853843212127686,
      "learning_rate": 3.166483314998166e-05,
      "loss": 1.8549,
      "step": 10000
    },
    {
      "epoch": 3.8503850385038505,
      "grad_norm": 1.7555441856384277,
      "learning_rate": 3.074807480748075e-05,
      "loss": 1.8724,
      "step": 10500
    },
    {
      "epoch": 4.033736707004033,
      "grad_norm": 2.166177749633789,
      "learning_rate": 2.983131646497983e-05,
      "loss": 1.8574,
      "step": 11000
    },
    {
      "epoch": 4.2170883755042174,
      "grad_norm": 2.927156925201416,
      "learning_rate": 2.891455812247892e-05,
      "loss": 1.8507,
      "step": 11500
    },
    {
      "epoch": 4.400440044004401,
      "grad_norm": 2.2842493057250977,
      "learning_rate": 2.7997799779978003e-05,
      "loss": 1.8447,
      "step": 12000
    },
    {
      "epoch": 4.583791712504584,
      "grad_norm": 2.1565608978271484,
      "learning_rate": 2.7081041437477084e-05,
      "loss": 1.8359,
      "step": 12500
    },
    {
      "epoch": 4.767143381004767,
      "grad_norm": 2.3850159645080566,
      "learning_rate": 2.6164283094976168e-05,
      "loss": 1.8413,
      "step": 13000
    },
    {
      "epoch": 4.9504950495049505,
      "grad_norm": 2.2318992614746094,
      "learning_rate": 2.5247524752475248e-05,
      "loss": 1.8397,
      "step": 13500
    },
    {
      "epoch": 5.133846718005134,
      "grad_norm": 2.347505569458008,
      "learning_rate": 2.4330766409974332e-05,
      "loss": 1.8196,
      "step": 14000
    },
    {
      "epoch": 5.317198386505317,
      "grad_norm": 2.1487085819244385,
      "learning_rate": 2.3414008067473413e-05,
      "loss": 1.8309,
      "step": 14500
    },
    {
      "epoch": 5.5005500550055,
      "grad_norm": 2.21701717376709,
      "learning_rate": 2.24972497249725e-05,
      "loss": 1.8212,
      "step": 15000
    },
    {
      "epoch": 5.683901723505684,
      "grad_norm": 2.2371625900268555,
      "learning_rate": 2.158049138247158e-05,
      "loss": 1.8148,
      "step": 15500
    },
    {
      "epoch": 5.867253392005868,
      "grad_norm": 2.157254457473755,
      "learning_rate": 2.0663733039970665e-05,
      "loss": 1.83,
      "step": 16000
    },
    {
      "epoch": 6.050605060506051,
      "grad_norm": 2.467193603515625,
      "learning_rate": 1.9746974697469746e-05,
      "loss": 1.8115,
      "step": 16500
    },
    {
      "epoch": 6.233956729006234,
      "grad_norm": 2.3348140716552734,
      "learning_rate": 1.883021635496883e-05,
      "loss": 1.8037,
      "step": 17000
    },
    {
      "epoch": 6.417308397506417,
      "grad_norm": 2.6734750270843506,
      "learning_rate": 1.7913458012467914e-05,
      "loss": 1.805,
      "step": 17500
    },
    {
      "epoch": 6.600660066006601,
      "grad_norm": 2.0465357303619385,
      "learning_rate": 1.6996699669966998e-05,
      "loss": 1.8014,
      "step": 18000
    },
    {
      "epoch": 6.784011734506784,
      "grad_norm": 2.7078983783721924,
      "learning_rate": 1.6079941327466082e-05,
      "loss": 1.7988,
      "step": 18500
    },
    {
      "epoch": 6.967363403006967,
      "grad_norm": 2.771340847015381,
      "learning_rate": 1.5163182984965163e-05,
      "loss": 1.796,
      "step": 19000
    },
    {
      "epoch": 7.15071507150715,
      "grad_norm": 2.8874900341033936,
      "learning_rate": 1.4246424642464248e-05,
      "loss": 1.7986,
      "step": 19500
    },
    {
      "epoch": 7.334066740007334,
      "grad_norm": 2.211998701095581,
      "learning_rate": 1.3329666299963331e-05,
      "loss": 1.7963,
      "step": 20000
    },
    {
      "epoch": 7.517418408507518,
      "grad_norm": 2.0423145294189453,
      "learning_rate": 1.2412907957462413e-05,
      "loss": 1.7784,
      "step": 20500
    },
    {
      "epoch": 7.700770077007701,
      "grad_norm": 2.0297274589538574,
      "learning_rate": 1.1496149614961496e-05,
      "loss": 1.7987,
      "step": 21000
    },
    {
      "epoch": 7.884121745507884,
      "grad_norm": 2.6104135513305664,
      "learning_rate": 1.057939127246058e-05,
      "loss": 1.7847,
      "step": 21500
    },
    {
      "epoch": 8.067473414008067,
      "grad_norm": 2.4602534770965576,
      "learning_rate": 9.662632929959662e-06,
      "loss": 1.7903,
      "step": 22000
    },
    {
      "epoch": 8.250825082508252,
      "grad_norm": 2.1262950897216797,
      "learning_rate": 8.745874587458746e-06,
      "loss": 1.795,
      "step": 22500
    },
    {
      "epoch": 8.434176751008435,
      "grad_norm": 2.538703680038452,
      "learning_rate": 7.829116244957828e-06,
      "loss": 1.7708,
      "step": 23000
    },
    {
      "epoch": 8.617528419508618,
      "grad_norm": 2.607891321182251,
      "learning_rate": 6.912357902456913e-06,
      "loss": 1.7824,
      "step": 23500
    },
    {
      "epoch": 8.800880088008801,
      "grad_norm": 2.0636353492736816,
      "learning_rate": 5.995599559955996e-06,
      "loss": 1.7651,
      "step": 24000
    },
    {
      "epoch": 8.984231756508985,
      "grad_norm": 2.1311886310577393,
      "learning_rate": 5.078841217455079e-06,
      "loss": 1.7825,
      "step": 24500
    },
    {
      "epoch": 9.167583425009168,
      "grad_norm": 2.185648202896118,
      "learning_rate": 4.162082874954162e-06,
      "loss": 1.7744,
      "step": 25000
    },
    {
      "epoch": 9.350935093509351,
      "grad_norm": 2.149644374847412,
      "learning_rate": 3.2453245324532458e-06,
      "loss": 1.781,
      "step": 25500
    },
    {
      "epoch": 9.534286762009534,
      "grad_norm": 2.1185684204101562,
      "learning_rate": 2.3285661899523286e-06,
      "loss": 1.7712,
      "step": 26000
    },
    {
      "epoch": 9.717638430509718,
      "grad_norm": 2.186683416366577,
      "learning_rate": 1.411807847451412e-06,
      "loss": 1.7739,
      "step": 26500
    },
    {
      "epoch": 9.900990099009901,
      "grad_norm": 2.319247007369995,
      "learning_rate": 4.950495049504951e-07,
      "loss": 1.7713,
      "step": 27000
    },
    {
      "epoch": 10.0,
      "step": 27270,
      "total_flos": 2.615057185279181e+16,
      "train_loss": 1.8617943959923335,
      "train_runtime": 1938.923,
      "train_samples_per_second": 112.49,
      "train_steps_per_second": 14.065
    }
  ],
  "logging_steps": 500,
  "max_steps": 27270,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.615057185279181e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}