{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0729613733905579,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.988078206962327e-05,
      "loss": 1.7516,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.976156413924655e-05,
      "loss": 1.7798,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.964234620886982e-05,
      "loss": 1.7269,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9523128278493086e-05,
      "loss": 1.6977,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.940391034811636e-05,
      "loss": 1.6931,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.928469241773963e-05,
      "loss": 1.6307,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.91654744873629e-05,
      "loss": 1.6882,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.904625655698618e-05,
      "loss": 1.6711,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8927038626609446e-05,
      "loss": 1.6717,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8807820696232716e-05,
      "loss": 1.6882,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.868860276585599e-05,
      "loss": 1.6379,
      "step": 110
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.856938483547926e-05,
      "loss": 1.6351,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.845016690510253e-05,
      "loss": 1.6685,
      "step": 130
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8330948974725806e-05,
      "loss": 1.632,
      "step": 140
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8211731044349076e-05,
      "loss": 1.6629,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8092513113972345e-05,
      "loss": 1.6639,
      "step": 160
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7973295183595614e-05,
      "loss": 1.5963,
      "step": 170
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.785407725321889e-05,
      "loss": 1.6739,
      "step": 180
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.773485932284216e-05,
      "loss": 1.6487,
      "step": 190
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.761564139246543e-05,
      "loss": 1.6607,
      "step": 200
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.74964234620887e-05,
      "loss": 1.6357,
      "step": 210
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7377205531711974e-05,
      "loss": 1.6406,
      "step": 220
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7257987601335244e-05,
      "loss": 1.6421,
      "step": 230
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.713876967095851e-05,
      "loss": 1.6822,
      "step": 240
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.701955174058178e-05,
      "loss": 1.6343,
      "step": 250
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.690033381020505e-05,
      "loss": 1.6222,
      "step": 260
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.678111587982833e-05,
      "loss": 1.6628,
      "step": 270
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.66618979494516e-05,
      "loss": 1.6068,
      "step": 280
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6542680019074866e-05,
      "loss": 1.6533,
      "step": 290
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.642346208869814e-05,
      "loss": 1.6371,
      "step": 300
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.630424415832141e-05,
      "loss": 1.6491,
      "step": 310
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.618502622794468e-05,
      "loss": 1.6218,
      "step": 320
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.606580829756796e-05,
      "loss": 1.6676,
      "step": 330
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5946590367191226e-05,
      "loss": 1.5947,
      "step": 340
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5827372436814496e-05,
      "loss": 1.6307,
      "step": 350
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.570815450643777e-05,
      "loss": 1.609,
      "step": 360
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.558893657606104e-05,
      "loss": 1.6171,
      "step": 370
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.546971864568431e-05,
      "loss": 1.6125,
      "step": 380
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5350500715307586e-05,
      "loss": 1.6325,
      "step": 390
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5231282784930856e-05,
      "loss": 1.626,
      "step": 400
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5112064854554125e-05,
      "loss": 1.5777,
      "step": 410
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.49928469241774e-05,
      "loss": 1.6446,
      "step": 420
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.487362899380067e-05,
      "loss": 1.6189,
      "step": 430
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.475441106342394e-05,
      "loss": 1.6356,
      "step": 440
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4635193133047216e-05,
      "loss": 1.6001,
      "step": 450
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4515975202670485e-05,
      "loss": 1.6226,
      "step": 460
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4396757272293754e-05,
      "loss": 1.6109,
      "step": 470
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4277539341917024e-05,
      "loss": 1.6559,
      "step": 480
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.41583214115403e-05,
      "loss": 1.6108,
      "step": 490
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.403910348116357e-05,
      "loss": 1.6137,
      "step": 500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.391988555078684e-05,
      "loss": 1.6332,
      "step": 510
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3800667620410114e-05,
      "loss": 1.6256,
      "step": 520
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3681449690033384e-05,
      "loss": 1.6519,
      "step": 530
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.356223175965665e-05,
      "loss": 1.6324,
      "step": 540
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.344301382927993e-05,
      "loss": 1.632,
      "step": 550
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.33237958989032e-05,
      "loss": 1.6378,
      "step": 560
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.320457796852647e-05,
      "loss": 1.6116,
      "step": 570
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3085360038149744e-05,
      "loss": 1.5908,
      "step": 580
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.296614210777301e-05,
      "loss": 1.6291,
      "step": 590
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.284692417739628e-05,
      "loss": 1.6607,
      "step": 600
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.272770624701956e-05,
      "loss": 1.6123,
      "step": 610
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.260848831664283e-05,
      "loss": 1.5969,
      "step": 620
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.24892703862661e-05,
      "loss": 1.6499,
      "step": 630
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.237005245588937e-05,
      "loss": 1.6096,
      "step": 640
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.225083452551264e-05,
      "loss": 1.6342,
      "step": 650
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.213161659513591e-05,
      "loss": 1.6238,
      "step": 660
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.201239866475918e-05,
      "loss": 1.6423,
      "step": 670
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.189318073438245e-05,
      "loss": 1.6435,
      "step": 680
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1773962804005726e-05,
      "loss": 1.6319,
      "step": 690
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1654744873628996e-05,
      "loss": 1.6271,
      "step": 700
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1535526943252265e-05,
      "loss": 1.6346,
      "step": 710
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1416309012875534e-05,
      "loss": 1.6444,
      "step": 720
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.129709108249881e-05,
      "loss": 1.637,
      "step": 730
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.117787315212208e-05,
      "loss": 1.6025,
      "step": 740
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.105865522174535e-05,
      "loss": 1.6256,
      "step": 750
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0939437291368625e-05,
      "loss": 1.6271,
      "step": 760
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0820219360991894e-05,
      "loss": 1.6333,
      "step": 770
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0701001430615164e-05,
      "loss": 1.6436,
      "step": 780
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.058178350023844e-05,
      "loss": 1.6509,
      "step": 790
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.046256556986171e-05,
      "loss": 1.6012,
      "step": 800
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.034334763948498e-05,
      "loss": 1.66,
      "step": 810
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.022412970910825e-05,
      "loss": 1.6011,
      "step": 820
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0104911778731524e-05,
      "loss": 1.591,
      "step": 830
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.998569384835479e-05,
      "loss": 1.6367,
      "step": 840
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.986647591797806e-05,
      "loss": 1.572,
      "step": 850
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.974725798760134e-05,
      "loss": 1.6407,
      "step": 860
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.962804005722461e-05,
      "loss": 1.6409,
      "step": 870
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.950882212684788e-05,
      "loss": 1.637,
      "step": 880
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.938960419647115e-05,
      "loss": 1.649,
      "step": 890
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.927038626609442e-05,
      "loss": 1.6672,
      "step": 900
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.915116833571769e-05,
      "loss": 1.6262,
      "step": 910
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.903195040534097e-05,
      "loss": 1.596,
      "step": 920
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.891273247496424e-05,
      "loss": 1.6023,
      "step": 930
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8793514544587506e-05,
      "loss": 1.645,
      "step": 940
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.867429661421078e-05,
      "loss": 1.6276,
      "step": 950
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.855507868383405e-05,
      "loss": 1.6153,
      "step": 960
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.843586075345732e-05,
      "loss": 1.6322,
      "step": 970
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.83166428230806e-05,
      "loss": 1.6123,
      "step": 980
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8197424892703866e-05,
      "loss": 1.6155,
      "step": 990
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8078206962327136e-05,
      "loss": 1.5998,
      "step": 1000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.795898903195041e-05,
      "loss": 1.6432,
      "step": 1010
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.783977110157368e-05,
      "loss": 1.6383,
      "step": 1020
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.772055317119695e-05,
      "loss": 1.6195,
      "step": 1030
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7601335240820226e-05,
      "loss": 1.5837,
      "step": 1040
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7482117310443496e-05,
      "loss": 1.5957,
      "step": 1050
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7362899380066765e-05,
      "loss": 1.6522,
      "step": 1060
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.724368144969004e-05,
      "loss": 1.6309,
      "step": 1070
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.712446351931331e-05,
      "loss": 1.6618,
      "step": 1080
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.700524558893658e-05,
      "loss": 1.6465,
      "step": 1090
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.688602765855985e-05,
      "loss": 1.6165,
      "step": 1100
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.676680972818312e-05,
      "loss": 1.6425,
      "step": 1110
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6647591797806394e-05,
      "loss": 1.6341,
      "step": 1120
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6528373867429664e-05,
      "loss": 1.6277,
      "step": 1130
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.640915593705293e-05,
      "loss": 1.6182,
      "step": 1140
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.62899380066762e-05,
      "loss": 1.639,
      "step": 1150
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.617072007629947e-05,
      "loss": 1.6185,
      "step": 1160
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.605150214592275e-05,
      "loss": 1.6146,
      "step": 1170
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.593228421554602e-05,
      "loss": 1.6149,
      "step": 1180
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5813066285169286e-05,
      "loss": 1.6699,
      "step": 1190
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.569384835479256e-05,
      "loss": 1.6413,
      "step": 1200
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.557463042441583e-05,
      "loss": 1.6124,
      "step": 1210
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.54554124940391e-05,
      "loss": 1.6246,
      "step": 1220
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.533619456366238e-05,
      "loss": 1.6301,
      "step": 1230
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5216976633285646e-05,
      "loss": 1.6356,
      "step": 1240
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5097758702908915e-05,
      "loss": 1.6053,
      "step": 1250
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.497854077253219e-05,
      "loss": 1.6313,
      "step": 1260
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.485932284215546e-05,
      "loss": 1.6578,
      "step": 1270
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.474010491177873e-05,
      "loss": 1.6059,
      "step": 1280
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4620886981402006e-05,
      "loss": 1.6687,
      "step": 1290
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4501669051025275e-05,
      "loss": 1.6502,
      "step": 1300
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4382451120648545e-05,
      "loss": 1.5909,
      "step": 1310
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.426323319027182e-05,
      "loss": 1.6133,
      "step": 1320
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.414401525989509e-05,
      "loss": 1.6187,
      "step": 1330
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.402479732951836e-05,
      "loss": 1.6338,
      "step": 1340
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3905579399141636e-05,
      "loss": 1.6437,
      "step": 1350
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3786361468764905e-05,
      "loss": 1.6663,
      "step": 1360
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3667143538388174e-05,
      "loss": 1.6262,
      "step": 1370
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.354792560801145e-05,
      "loss": 1.5997,
      "step": 1380
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.342870767763472e-05,
      "loss": 1.6521,
      "step": 1390
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.330948974725799e-05,
      "loss": 1.6198,
      "step": 1400
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3190271816881265e-05,
      "loss": 1.608,
      "step": 1410
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3071053886504534e-05,
      "loss": 1.6347,
      "step": 1420
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.2951835956127803e-05,
      "loss": 1.5617,
      "step": 1430
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.283261802575107e-05,
      "loss": 1.6609,
      "step": 1440
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.271340009537435e-05,
      "loss": 1.6014,
      "step": 1450
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.259418216499762e-05,
      "loss": 1.6069,
      "step": 1460
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.247496423462089e-05,
      "loss": 1.6421,
      "step": 1470
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2355746304244164e-05,
      "loss": 1.6507,
      "step": 1480
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.223652837386743e-05,
      "loss": 1.6048,
      "step": 1490
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.21173104434907e-05,
      "loss": 1.6274,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 4194,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 9.47879226680279e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}