{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 24495,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.06,
      "learning_rate": 4.898550724637682e-05,
      "loss": 5.4391,
      "step": 500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.79648907940396e-05,
      "loss": 3.2254,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.694427434170239e-05,
      "loss": 2.7119,
      "step": 1500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.592365788936518e-05,
      "loss": 2.4259,
      "step": 2000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.490304143702796e-05,
      "loss": 2.206,
      "step": 2500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3884466217595436e-05,
      "loss": 2.1015,
      "step": 3000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.286384976525822e-05,
      "loss": 1.99,
      "step": 3500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.184527454582568e-05,
      "loss": 1.9223,
      "step": 4000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.082465809348847e-05,
      "loss": 1.8566,
      "step": 4500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9804041641151255e-05,
      "loss": 1.7873,
      "step": 5000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8783425188814046e-05,
      "loss": 1.7276,
      "step": 5500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.776280873647684e-05,
      "loss": 1.6822,
      "step": 6000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.674219228413962e-05,
      "loss": 1.6448,
      "step": 6500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.572157583180241e-05,
      "loss": 1.5829,
      "step": 7000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.47009593794652e-05,
      "loss": 1.5763,
      "step": 7500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.368034292712799e-05,
      "loss": 1.5781,
      "step": 8000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.265972647479077e-05,
      "loss": 1.5427,
      "step": 8500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.1639110022453563e-05,
      "loss": 1.5025,
      "step": 9000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0618493570116355e-05,
      "loss": 1.4859,
      "step": 9500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.959787711777914e-05,
      "loss": 1.4483,
      "step": 10000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.857726066544193e-05,
      "loss": 1.4282,
      "step": 10500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7558685446009393e-05,
      "loss": 1.4401,
      "step": 11000
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6538068993672177e-05,
      "loss": 1.3864,
      "step": 11500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5517452541334968e-05,
      "loss": 1.4,
      "step": 12000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4496836088997756e-05,
      "loss": 1.3831,
      "step": 12500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3476219636660544e-05,
      "loss": 1.3708,
      "step": 13000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.245560318432333e-05,
      "loss": 1.3737,
      "step": 13500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.143498673198612e-05,
      "loss": 1.3142,
      "step": 14000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.041437027964891e-05,
      "loss": 1.333,
      "step": 14500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.939579506021637e-05,
      "loss": 1.3242,
      "step": 15000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.837517860787916e-05,
      "loss": 1.3056,
      "step": 15500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7356603388446624e-05,
      "loss": 1.2832,
      "step": 16000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.633598693610941e-05,
      "loss": 1.2872,
      "step": 16500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.53153704837722e-05,
      "loss": 1.2703,
      "step": 17000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4294754031434987e-05,
      "loss": 1.261,
      "step": 17500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3274137579097776e-05,
      "loss": 1.2512,
      "step": 18000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2253521126760564e-05,
      "loss": 1.2212,
      "step": 18500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1232904674423353e-05,
      "loss": 1.2351,
      "step": 19000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0212288222086141e-05,
      "loss": 1.2337,
      "step": 19500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.191671769748929e-06,
      "loss": 1.2258,
      "step": 20000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.171055317411718e-06,
      "loss": 1.2216,
      "step": 20500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.150438865074505e-06,
      "loss": 1.2057,
      "step": 21000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.129822412737294e-06,
      "loss": 1.2135,
      "step": 21500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.111247193304756e-06,
      "loss": 1.2054,
      "step": 22000
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.090630740967544e-06,
      "loss": 1.2021,
      "step": 22500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0700142886303327e-06,
      "loss": 1.1868,
      "step": 23000
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.049397836293121e-06,
      "loss": 1.1735,
      "step": 23500
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0308226168605838e-06,
      "loss": 1.2107,
      "step": 24000
    },
    {
      "epoch": 3.0,
      "step": 24495,
      "total_flos": 1.290813800478588e+16,
      "train_loss": 1.6035245578856974,
      "train_runtime": 14452.6928,
      "train_samples_per_second": 1.695,
      "train_steps_per_second": 1.695
    }
  ],
  "max_steps": 24495,
  "num_train_epochs": 3,
  "total_flos": 1.290813800478588e+16,
  "trial_name": null,
  "trial_params": null
}