{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 25070,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0997207818109294,
      "grad_norm": 2.346997022628784,
      "learning_rate": 4.5023932987634625e-05,
      "loss": 8.2424,
      "step": 500
    },
    {
      "epoch": 0.1994415636218588,
      "grad_norm": 2.3684158325195312,
      "learning_rate": 4.0037893897088155e-05,
      "loss": 7.6851,
      "step": 1000
    },
    {
      "epoch": 0.2991623454327882,
      "grad_norm": 3.409303665161133,
      "learning_rate": 3.5051854806541686e-05,
      "loss": 7.4872,
      "step": 1500
    },
    {
      "epoch": 0.3988831272437176,
      "grad_norm": 2.615360975265503,
      "learning_rate": 3.0065815715995216e-05,
      "loss": 7.344,
      "step": 2000
    },
    {
      "epoch": 0.49860390905464697,
      "grad_norm": 3.5242176055908203,
      "learning_rate": 2.5079776625448743e-05,
      "loss": 7.2749,
      "step": 2500
    },
    {
      "epoch": 0.5983246908655764,
      "grad_norm": 3.690262794494629,
      "learning_rate": 2.0093737534902273e-05,
      "loss": 7.1657,
      "step": 3000
    },
    {
      "epoch": 0.6980454726765057,
      "grad_norm": 2.940692663192749,
      "learning_rate": 1.5107698444355806e-05,
      "loss": 7.1298,
      "step": 3500
    },
    {
      "epoch": 0.7977662544874352,
      "grad_norm": 2.9132378101348877,
      "learning_rate": 1.0121659353809334e-05,
      "loss": 7.0938,
      "step": 4000
    },
    {
      "epoch": 0.8974870362983646,
      "grad_norm": 3.101921558380127,
      "learning_rate": 5.135620263262864e-06,
      "loss": 7.0715,
      "step": 4500
    },
    {
      "epoch": 0.9972078181092939,
      "grad_norm": 3.2258358001708984,
      "learning_rate": 1.495811727163941e-07,
      "loss": 7.0478,
      "step": 5000
    },
    {
      "epoch": 1.0969285999202234,
      "grad_norm": 3.2722208499908447,
      "learning_rate": 3.903270841643399e-05,
      "loss": 7.0374,
      "step": 5500
    },
    {
      "epoch": 1.1966493817311528,
      "grad_norm": 5.218217849731445,
      "learning_rate": 3.803550059832469e-05,
      "loss": 7.0289,
      "step": 6000
    },
    {
      "epoch": 1.2963701635420821,
      "grad_norm": 3.466571807861328,
      "learning_rate": 3.70382927802154e-05,
      "loss": 6.9595,
      "step": 6500
    },
    {
      "epoch": 1.3960909453530115,
      "grad_norm": 3.688443183898926,
      "learning_rate": 3.6041084962106106e-05,
      "loss": 6.9267,
      "step": 7000
    },
    {
      "epoch": 1.4958117271639408,
      "grad_norm": 3.0426700115203857,
      "learning_rate": 3.504387714399681e-05,
      "loss": 6.8954,
      "step": 7500
    },
    {
      "epoch": 1.5955325089748702,
      "grad_norm": 3.7769949436187744,
      "learning_rate": 3.404666932588751e-05,
      "loss": 6.8657,
      "step": 8000
    },
    {
      "epoch": 1.6952532907857998,
      "grad_norm": 3.0776305198669434,
      "learning_rate": 3.304946150777822e-05,
      "loss": 6.8285,
      "step": 8500
    },
    {
      "epoch": 1.7949740725967291,
      "grad_norm": 3.350515604019165,
      "learning_rate": 3.2052253689668926e-05,
      "loss": 6.7948,
      "step": 9000
    },
    {
      "epoch": 1.8946948544076585,
      "grad_norm": 3.393035411834717,
      "learning_rate": 3.1055045871559636e-05,
      "loss": 6.7725,
      "step": 9500
    },
    {
      "epoch": 1.994415636218588,
      "grad_norm": 3.438401222229004,
      "learning_rate": 3.0057838053450336e-05,
      "loss": 6.7484,
      "step": 10000
    },
    {
      "epoch": 2.0941364180295174,
      "grad_norm": 4.042023181915283,
      "learning_rate": 2.9060630235341047e-05,
      "loss": 6.6939,
      "step": 10500
    },
    {
      "epoch": 2.193857199840447,
      "grad_norm": 3.3481028079986572,
      "learning_rate": 2.8063422417231757e-05,
      "loss": 6.6854,
      "step": 11000
    },
    {
      "epoch": 2.293577981651376,
      "grad_norm": 3.266961097717285,
      "learning_rate": 2.706820901475868e-05,
      "loss": 6.6555,
      "step": 11500
    },
    {
      "epoch": 2.3932987634623055,
      "grad_norm": 3.215405225753784,
      "learning_rate": 2.607100119664938e-05,
      "loss": 6.6713,
      "step": 12000
    },
    {
      "epoch": 2.493019545273235,
      "grad_norm": 3.380500316619873,
      "learning_rate": 2.507379337854009e-05,
      "loss": 6.6581,
      "step": 12500
    },
    {
      "epoch": 2.5927403270841642,
      "grad_norm": 3.536166191101074,
      "learning_rate": 2.4076585560430796e-05,
      "loss": 6.5945,
      "step": 13000
    },
    {
      "epoch": 2.6924611088950936,
      "grad_norm": 3.9319474697113037,
      "learning_rate": 2.30793777423215e-05,
      "loss": 6.6057,
      "step": 13500
    },
    {
      "epoch": 2.792181890706023,
      "grad_norm": 4.334239482879639,
      "learning_rate": 2.2084164339848425e-05,
      "loss": 6.5818,
      "step": 14000
    },
    {
      "epoch": 2.8919026725169523,
      "grad_norm": 4.093286514282227,
      "learning_rate": 2.1086956521739132e-05,
      "loss": 6.5732,
      "step": 14500
    },
    {
      "epoch": 2.9916234543278817,
      "grad_norm": 4.026576995849609,
      "learning_rate": 2.008974870362984e-05,
      "loss": 6.5627,
      "step": 15000
    },
    {
      "epoch": 3.0913442361388115,
      "grad_norm": 3.7285637855529785,
      "learning_rate": 1.9092540885520542e-05,
      "loss": 6.5268,
      "step": 15500
    },
    {
      "epoch": 3.191065017949741,
      "grad_norm": 3.7349226474761963,
      "learning_rate": 1.809533306741125e-05,
      "loss": 6.5388,
      "step": 16000
    },
    {
      "epoch": 3.29078579976067,
      "grad_norm": 3.5330066680908203,
      "learning_rate": 1.7098125249301956e-05,
      "loss": 6.5141,
      "step": 16500
    },
    {
      "epoch": 3.3905065815715996,
      "grad_norm": 3.6961631774902344,
      "learning_rate": 1.6100917431192662e-05,
      "loss": 6.5013,
      "step": 17000
    },
    {
      "epoch": 3.490227363382529,
      "grad_norm": 3.413053274154663,
      "learning_rate": 1.5103709613083367e-05,
      "loss": 6.4932,
      "step": 17500
    },
    {
      "epoch": 3.5899481451934583,
      "grad_norm": 4.584457874298096,
      "learning_rate": 1.4108496210610292e-05,
      "loss": 6.4695,
      "step": 18000
    },
    {
      "epoch": 3.6896689270043876,
      "grad_norm": 3.3078787326812744,
      "learning_rate": 1.3111288392500998e-05,
      "loss": 6.4711,
      "step": 18500
    },
    {
      "epoch": 3.789389708815317,
      "grad_norm": 3.6679279804229736,
      "learning_rate": 1.2114080574391703e-05,
      "loss": 6.466,
      "step": 19000
    },
    {
      "epoch": 3.8891104906262464,
      "grad_norm": 4.358784198760986,
      "learning_rate": 1.1116872756282408e-05,
      "loss": 6.4568,
      "step": 19500
    },
    {
      "epoch": 3.988831272437176,
      "grad_norm": 4.014244556427002,
      "learning_rate": 1.0119664938173115e-05,
      "loss": 6.4536,
      "step": 20000
    },
    {
      "epoch": 4.0885520542481055,
      "grad_norm": 3.8396079540252686,
      "learning_rate": 9.122457120063822e-06,
      "loss": 6.443,
      "step": 20500
    },
    {
      "epoch": 4.188272836059035,
      "grad_norm": 3.850647449493408,
      "learning_rate": 8.125249301954529e-06,
      "loss": 6.4186,
      "step": 21000
    },
    {
      "epoch": 4.287993617869964,
      "grad_norm": 3.829951047897339,
      "learning_rate": 7.128041483845234e-06,
      "loss": 6.4178,
      "step": 21500
    },
    {
      "epoch": 4.387714399680894,
      "grad_norm": 3.5512278079986572,
      "learning_rate": 6.132828081372159e-06,
      "loss": 6.4055,
      "step": 22000
    },
    {
      "epoch": 4.487435181491823,
      "grad_norm": 3.568665027618408,
      "learning_rate": 5.135620263262864e-06,
      "loss": 6.4076,
      "step": 22500
    },
    {
      "epoch": 4.587155963302752,
      "grad_norm": 3.71463942527771,
      "learning_rate": 4.13841244515357e-06,
      "loss": 6.4086,
      "step": 23000
    },
    {
      "epoch": 4.686876745113682,
      "grad_norm": 3.9615983963012695,
      "learning_rate": 3.1412046270442757e-06,
      "loss": 6.4061,
      "step": 23500
    },
    {
      "epoch": 4.786597526924611,
      "grad_norm": 4.0287909507751465,
      "learning_rate": 2.1459912245712007e-06,
      "loss": 6.3772,
      "step": 24000
    },
    {
      "epoch": 4.88631830873554,
      "grad_norm": 4.012565612792969,
      "learning_rate": 1.1487834064619066e-06,
      "loss": 6.3956,
      "step": 24500
    },
    {
      "epoch": 4.98603909054647,
      "grad_norm": 4.36814022064209,
      "learning_rate": 1.515755883526127e-07,
      "loss": 6.3996,
      "step": 25000
    },
    {
      "epoch": 5.0,
      "step": 25070,
      "total_flos": 2.639861525017728e+16,
      "train_loss": 5.285465436767286,
      "train_runtime": 6500.188,
      "train_samples_per_second": 61.705,
      "train_steps_per_second": 3.857
    }
  ],
  "logging_steps": 500,
  "max_steps": 25070,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.639861525017728e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}