{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9980481457384514,
  "eval_steps": 500,
  "global_step": 4320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 7.692307692307694e-06,
      "loss": 3.0678,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 2.3204,
      "step": 100
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999887567062682e-05,
      "loss": 1.9946,
      "step": 150
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998622986851668e-05,
      "loss": 1.8239,
      "step": 200
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9959550682646195e-05,
      "loss": 1.7682,
      "step": 250
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9918875604567105e-05,
      "loss": 1.6983,
      "step": 300
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9864261793886955e-05,
      "loss": 1.6675,
      "step": 350
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9795785997944227e-05,
      "loss": 1.6711,
      "step": 400
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.971354444395734e-05,
      "loss": 1.6544,
      "step": 450
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9617652703799102e-05,
      "loss": 1.6135,
      "step": 500
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9508245531586613e-05,
      "loss": 1.6339,
      "step": 550
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9385476674314883e-05,
      "loss": 1.5796,
      "step": 600
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9249518655800262e-05,
      "loss": 1.5763,
      "step": 650
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9100562534237318e-05,
      "loss": 1.5615,
      "step": 700
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8938817633709796e-05,
      "loss": 1.5427,
      "step": 750
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8764511250033053e-05,
      "loss": 1.5316,
      "step": 800
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8577888331341302e-05,
      "loss": 1.5422,
      "step": 850
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.837921113386847e-05,
      "loss": 1.5417,
      "step": 900
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.816875885340649e-05,
      "loss": 1.5356,
      "step": 950
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7946827232958858e-05,
      "loss": 1.5343,
      "step": 1000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7713728147140857e-05,
      "loss": 1.5125,
      "step": 1050
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7469789163910416e-05,
      "loss": 1.5159,
      "step": 1100
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7215353084245564e-05,
      "loss": 1.5107,
      "step": 1150
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6950777460415324e-05,
      "loss": 1.5229,
      "step": 1200
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.667643409352098e-05,
      "loss": 1.4952,
      "step": 1250
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6392708511013874e-05,
      "loss": 1.5025,
      "step": 1300
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6099999424923913e-05,
      "loss": 1.488,
      "step": 1350
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5798718171560124e-05,
      "loss": 1.5125,
      "step": 1400
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.548928813347067e-05,
      "loss": 1.506,
      "step": 1450
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5172144144474582e-05,
      "loss": 1.4895,
      "step": 1500
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4847731878601348e-05,
      "loss": 1.4411,
      "step": 1550
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4516507223797018e-05,
      "loss": 1.4307,
      "step": 1600
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4178935641276982e-05,
      "loss": 1.4289,
      "step": 1650
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.3835491511425678e-05,
      "loss": 1.4333,
      "step": 1700
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3486657467162423e-05,
      "loss": 1.4518,
      "step": 1750
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3132923715710191e-05,
      "loss": 1.4258,
      "step": 1800
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.277478734972041e-05,
      "loss": 1.4344,
      "step": 1850
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2412751648721855e-05,
      "loss": 1.434,
      "step": 1900
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2047325371875286e-05,
      "loss": 1.4473,
      "step": 1950
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1679022043027706e-05,
      "loss": 1.4234,
      "step": 2000
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1308359229070914e-05,
      "loss": 1.4298,
      "step": 2050
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.093585781261852e-05,
      "loss": 1.4147,
      "step": 2100
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0562041260023406e-05,
      "loss": 1.4194,
      "step": 2150
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.018743488576437e-05,
      "loss": 1.4264,
      "step": 2200
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.812565114235635e-06,
      "loss": 1.4327,
      "step": 2250
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.437958739976597e-06,
      "loss": 1.4231,
      "step": 2300
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.064142187381482e-06,
      "loss": 1.397,
      "step": 2350
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.691640770929086e-06,
      "loss": 1.4213,
      "step": 2400
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.3209779569723e-06,
      "loss": 1.3827,
      "step": 2450
    },
    {
      "epoch": 1.73,
      "learning_rate": 7.952674628124715e-06,
      "loss": 1.4029,
      "step": 2500
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.587248351278149e-06,
      "loss": 1.4026,
      "step": 2550
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.2252126502795935e-06,
      "loss": 1.4032,
      "step": 2600
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.867076284289811e-06,
      "loss": 1.4065,
      "step": 2650
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.513342532837581e-06,
      "loss": 1.4033,
      "step": 2700
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.164508488574326e-06,
      "loss": 1.4236,
      "step": 2750
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.821064358723021e-06,
      "loss": 1.4105,
      "step": 2800
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.483492776202986e-06,
      "loss": 1.4443,
      "step": 2850
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.152268121398656e-06,
      "loss": 1.3914,
      "step": 2900
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.827855855525423e-06,
      "loss": 1.3694,
      "step": 2950
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.510711866529335e-06,
      "loss": 1.3685,
      "step": 3000
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.201281828439878e-06,
      "loss": 1.38,
      "step": 3050
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.9000005750760915e-06,
      "loss": 1.3944,
      "step": 3100
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.6072914889861287e-06,
      "loss": 1.3632,
      "step": 3150
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.323565906479024e-06,
      "loss": 1.3923,
      "step": 3200
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.0492225395846777e-06,
      "loss": 1.3598,
      "step": 3250
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.7846469157544353e-06,
      "loss": 1.3582,
      "step": 3300
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.530210836089586e-06,
      "loss": 1.4019,
      "step": 3350
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2862718528591434e-06,
      "loss": 1.3822,
      "step": 3400
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.0531727670411418e-06,
      "loss": 1.4255,
      "step": 3450
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.831241146593512e-06,
      "loss": 1.3501,
      "step": 3500
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.620788866131533e-06,
      "loss": 1.3792,
      "step": 3550
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.4221116686587e-06,
      "loss": 1.3632,
      "step": 3600
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2354887499669487e-06,
      "loss": 1.389,
      "step": 3650
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0611823662902088e-06,
      "loss": 1.3764,
      "step": 3700
    },
    {
      "epoch": 2.6,
      "learning_rate": 8.994374657626847e-07,
      "loss": 1.3873,
      "step": 3750
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.504813441997382e-07,
      "loss": 1.3481,
      "step": 3800
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.145233256851202e-07,
      "loss": 1.3685,
      "step": 3850
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.91754468413388e-07,
      "loss": 1.3712,
      "step": 3900
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.823472962008978e-07,
      "loss": 1.3702,
      "step": 3950
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.864555560426596e-07,
      "loss": 1.3731,
      "step": 4000
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.0421400205577591e-07,
      "loss": 1.392,
      "step": 4050
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.3573820611304745e-07,
      "loss": 1.3628,
      "step": 4100
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.112439543289663e-08,
      "loss": 1.367,
      "step": 4150
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.044931735380564e-08,
      "loss": 1.371,
      "step": 4200
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.3770131483322734e-08,
      "loss": 1.3728,
      "step": 4250
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.1243293731799665e-09,
      "loss": 1.3754,
      "step": 4300
    },
    {
      "epoch": 3.0,
      "step": 4320,
      "total_flos": 4.683004912987013e+17,
      "train_loss": 1.4905582423563357,
      "train_runtime": 19585.3727,
      "train_samples_per_second": 14.126,
      "train_steps_per_second": 0.221
    }
  ],
  "logging_steps": 50,
  "max_steps": 4320,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1200,
  "total_flos": 4.683004912987013e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}