{ |
|
"best_metric": 0.9405358686257562, |
|
"best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-3660", |
|
"epoch": 9.981561155500922, |
|
"eval_steps": 500, |
|
"global_step": 4060, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2315270935960593e-06, |
|
"loss": 1.8272, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.4630541871921186e-06, |
|
"loss": 1.7835, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.6945812807881777e-06, |
|
"loss": 1.751, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.926108374384237e-06, |
|
"loss": 1.7539, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 6.157635467980296e-06, |
|
"loss": 1.7125, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 7.3891625615763555e-06, |
|
"loss": 1.6708, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 8.620689655172414e-06, |
|
"loss": 1.7727, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.852216748768475e-06, |
|
"loss": 1.6377, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.1083743842364533e-05, |
|
"loss": 1.8024, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.2315270935960592e-05, |
|
"loss": 1.7476, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.354679802955665e-05, |
|
"loss": 1.6509, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.4778325123152711e-05, |
|
"loss": 1.7052, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6009852216748768e-05, |
|
"loss": 1.7003, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7241379310344828e-05, |
|
"loss": 1.7424, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.847290640394089e-05, |
|
"loss": 1.7223, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.970443349753695e-05, |
|
"loss": 1.6932, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.0935960591133006e-05, |
|
"loss": 1.6393, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.2167487684729066e-05, |
|
"loss": 1.7023, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.3399014778325123e-05, |
|
"loss": 1.6219, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.4630541871921184e-05, |
|
"loss": 1.7031, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.5862068965517244e-05, |
|
"loss": 1.7309, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.70935960591133e-05, |
|
"loss": 1.7251, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.8325123152709358e-05, |
|
"loss": 1.7039, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.9556650246305422e-05, |
|
"loss": 1.6753, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.078817733990148e-05, |
|
"loss": 1.7149, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.2019704433497536e-05, |
|
"loss": 1.7101, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.3251231527093596e-05, |
|
"loss": 1.6208, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.4482758620689657e-05, |
|
"loss": 1.5855, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.571428571428572e-05, |
|
"loss": 1.741, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.694581280788178e-05, |
|
"loss": 1.6925, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.817733990147783e-05, |
|
"loss": 1.6607, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.94088669950739e-05, |
|
"loss": 1.5157, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.064039408866995e-05, |
|
"loss": 1.5652, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.187192118226601e-05, |
|
"loss": 1.6547, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.3103448275862066e-05, |
|
"loss": 1.5618, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.433497536945813e-05, |
|
"loss": 1.6555, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.5566502463054186e-05, |
|
"loss": 1.5538, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.679802955665025e-05, |
|
"loss": 1.5496, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.802955665024631e-05, |
|
"loss": 1.5248, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.926108374384237e-05, |
|
"loss": 1.5645, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.7904926534140018, |
|
"eval_loss": 1.0879656076431274, |
|
"eval_runtime": 27.6968, |
|
"eval_samples_per_second": 208.869, |
|
"eval_steps_per_second": 6.535, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.9945265462506846e-05, |
|
"loss": 1.5933, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.980842911877395e-05, |
|
"loss": 1.5284, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.9671592775041054e-05, |
|
"loss": 1.5239, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.953475643130816e-05, |
|
"loss": 1.4781, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.939792008757526e-05, |
|
"loss": 1.468, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.926108374384237e-05, |
|
"loss": 1.5165, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.912424740010947e-05, |
|
"loss": 1.5641, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.8987411056376576e-05, |
|
"loss": 1.53, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.885057471264368e-05, |
|
"loss": 1.4946, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.8713738368910785e-05, |
|
"loss": 1.5378, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.857690202517789e-05, |
|
"loss": 1.4228, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.8440065681444994e-05, |
|
"loss": 1.4621, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.83032293377121e-05, |
|
"loss": 1.5405, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.81663929939792e-05, |
|
"loss": 1.4338, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.802955665024631e-05, |
|
"loss": 1.3429, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.789272030651341e-05, |
|
"loss": 1.4912, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.7755883962780516e-05, |
|
"loss": 1.4859, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.761904761904762e-05, |
|
"loss": 1.3696, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.7482211275314725e-05, |
|
"loss": 1.3818, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.734537493158183e-05, |
|
"loss": 1.395, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.7208538587848934e-05, |
|
"loss": 1.5301, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.707170224411604e-05, |
|
"loss": 1.3008, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.693486590038315e-05, |
|
"loss": 1.3089, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.679802955665025e-05, |
|
"loss": 1.4767, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.666119321291735e-05, |
|
"loss": 1.3715, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.652435686918446e-05, |
|
"loss": 1.3359, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.638752052545156e-05, |
|
"loss": 1.38, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.6250684181718664e-05, |
|
"loss": 1.4252, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.611384783798577e-05, |
|
"loss": 1.2751, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.597701149425287e-05, |
|
"loss": 1.4558, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.5840175150519984e-05, |
|
"loss": 1.3744, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.570333880678708e-05, |
|
"loss": 1.3676, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.5566502463054186e-05, |
|
"loss": 1.3031, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.54296661193213e-05, |
|
"loss": 1.346, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.5292829775588395e-05, |
|
"loss": 1.337, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.5155993431855506e-05, |
|
"loss": 1.318, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.501915708812261e-05, |
|
"loss": 1.4677, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.488232074438971e-05, |
|
"loss": 1.2769, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.474548440065682e-05, |
|
"loss": 1.2428, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.460864805692392e-05, |
|
"loss": 1.2961, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.447181171319103e-05, |
|
"loss": 1.3126, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.8399308556611927, |
|
"eval_loss": 0.7987398505210876, |
|
"eval_runtime": 27.7317, |
|
"eval_samples_per_second": 208.606, |
|
"eval_steps_per_second": 6.527, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.433497536945813e-05, |
|
"loss": 1.271, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.419813902572523e-05, |
|
"loss": 1.3539, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.406130268199234e-05, |
|
"loss": 1.2938, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.3924466338259446e-05, |
|
"loss": 1.2817, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.3787629994526544e-05, |
|
"loss": 1.2808, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.3650793650793655e-05, |
|
"loss": 1.2517, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.351395730706076e-05, |
|
"loss": 1.2371, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.3377120963327864e-05, |
|
"loss": 1.277, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.324028461959497e-05, |
|
"loss": 1.2152, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.3103448275862066e-05, |
|
"loss": 1.1733, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.296661193212918e-05, |
|
"loss": 1.3563, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.282977558839628e-05, |
|
"loss": 1.4114, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.2692939244663386e-05, |
|
"loss": 1.1794, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.255610290093049e-05, |
|
"loss": 1.2454, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.2419266557197594e-05, |
|
"loss": 1.2245, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.22824302134647e-05, |
|
"loss": 1.2197, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.21455938697318e-05, |
|
"loss": 1.3025, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.200875752599891e-05, |
|
"loss": 1.1717, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.187192118226601e-05, |
|
"loss": 1.2077, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.1735084838533116e-05, |
|
"loss": 1.3137, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.159824849480022e-05, |
|
"loss": 1.2348, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.1461412151067325e-05, |
|
"loss": 1.2611, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.132457580733443e-05, |
|
"loss": 1.0912, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.1187739463601534e-05, |
|
"loss": 1.3145, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.105090311986864e-05, |
|
"loss": 1.2088, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.091406677613574e-05, |
|
"loss": 1.1283, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.077723043240285e-05, |
|
"loss": 1.1388, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.064039408866995e-05, |
|
"loss": 1.2748, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.050355774493706e-05, |
|
"loss": 1.0486, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.036672140120416e-05, |
|
"loss": 1.1514, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.0229885057471265e-05, |
|
"loss": 1.2419, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.009304871373837e-05, |
|
"loss": 1.3025, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.9956212370005474e-05, |
|
"loss": 1.1627, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.981937602627258e-05, |
|
"loss": 1.2263, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.968253968253968e-05, |
|
"loss": 1.1918, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.954570333880679e-05, |
|
"loss": 1.2337, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.94088669950739e-05, |
|
"loss": 1.325, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.9272030651340996e-05, |
|
"loss": 1.1631, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.91351943076081e-05, |
|
"loss": 1.2506, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.899835796387521e-05, |
|
"loss": 1.2831, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.886152162014231e-05, |
|
"loss": 1.2335, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.8741573033707866, |
|
"eval_loss": 0.6398221254348755, |
|
"eval_runtime": 27.2608, |
|
"eval_samples_per_second": 212.209, |
|
"eval_steps_per_second": 6.64, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.872468527640942e-05, |
|
"loss": 1.1144, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.858784893267652e-05, |
|
"loss": 1.1746, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.845101258894362e-05, |
|
"loss": 1.1363, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.831417624521073e-05, |
|
"loss": 1.1003, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.817733990147783e-05, |
|
"loss": 1.1743, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.8040503557744935e-05, |
|
"loss": 1.1832, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.7903667214012047e-05, |
|
"loss": 1.1495, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.7766830870279144e-05, |
|
"loss": 1.1282, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.7629994526546255e-05, |
|
"loss": 1.1273, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.749315818281336e-05, |
|
"loss": 1.0586, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.735632183908046e-05, |
|
"loss": 1.1353, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.721948549534757e-05, |
|
"loss": 1.0796, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.7082649151614666e-05, |
|
"loss": 1.1132, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.694581280788178e-05, |
|
"loss": 1.0474, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.680897646414888e-05, |
|
"loss": 1.1392, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.667214012041598e-05, |
|
"loss": 1.1334, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.653530377668309e-05, |
|
"loss": 1.0506, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.6398467432950195e-05, |
|
"loss": 1.1107, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.62616310892173e-05, |
|
"loss": 1.1643, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.6124794745484404e-05, |
|
"loss": 1.0167, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.598795840175151e-05, |
|
"loss": 1.2377, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.585112205801861e-05, |
|
"loss": 1.0392, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.571428571428572e-05, |
|
"loss": 1.1304, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.5577449370552815e-05, |
|
"loss": 1.2185, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.5440613026819926e-05, |
|
"loss": 1.0848, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.530377668308703e-05, |
|
"loss": 1.1142, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.5166940339354135e-05, |
|
"loss": 1.0458, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.503010399562124e-05, |
|
"loss": 1.041, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.489326765188834e-05, |
|
"loss": 1.119, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.475643130815545e-05, |
|
"loss": 1.0259, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.461959496442255e-05, |
|
"loss": 1.14, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.4482758620689657e-05, |
|
"loss": 0.994, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.434592227695676e-05, |
|
"loss": 1.011, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.4209085933223865e-05, |
|
"loss": 1.0745, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.407224958949097e-05, |
|
"loss": 1.0777, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.3935413245758074e-05, |
|
"loss": 1.1397, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.379857690202518e-05, |
|
"loss": 1.0727, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.366174055829228e-05, |
|
"loss": 1.0662, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.352490421455939e-05, |
|
"loss": 1.0208, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.338806787082649e-05, |
|
"loss": 1.1244, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.9040622299049266, |
|
"eval_loss": 0.497685045003891, |
|
"eval_runtime": 27.8668, |
|
"eval_samples_per_second": 207.595, |
|
"eval_steps_per_second": 6.495, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 3.3251231527093596e-05, |
|
"loss": 0.9674, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.31143951833607e-05, |
|
"loss": 1.0643, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.297755883962781e-05, |
|
"loss": 1.0174, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.284072249589491e-05, |
|
"loss": 1.0732, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.2703886152162014e-05, |
|
"loss": 1.0587, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.256704980842912e-05, |
|
"loss": 1.0531, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.243021346469622e-05, |
|
"loss": 1.0491, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.2293377120963334e-05, |
|
"loss": 0.9433, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.215654077723043e-05, |
|
"loss": 0.9851, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.2019704433497536e-05, |
|
"loss": 1.0559, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.188286808976465e-05, |
|
"loss": 0.9604, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.1746031746031745e-05, |
|
"loss": 0.9938, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.160919540229885e-05, |
|
"loss": 1.0587, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 3.147235905856596e-05, |
|
"loss": 0.9771, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.133552271483306e-05, |
|
"loss": 1.0232, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.119868637110017e-05, |
|
"loss": 1.014, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.1061850027367273e-05, |
|
"loss": 0.9797, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.092501368363437e-05, |
|
"loss": 0.9898, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.078817733990148e-05, |
|
"loss": 1.0044, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.065134099616858e-05, |
|
"loss": 1.0723, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.0514504652435688e-05, |
|
"loss": 1.0545, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.0377668308702795e-05, |
|
"loss": 0.9979, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.0240831964969896e-05, |
|
"loss": 0.9971, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.0103995621237e-05, |
|
"loss": 1.0341, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.996715927750411e-05, |
|
"loss": 0.9853, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.983032293377121e-05, |
|
"loss": 1.0434, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.9693486590038317e-05, |
|
"loss": 0.94, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.9556650246305422e-05, |
|
"loss": 1.0002, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.9419813902572523e-05, |
|
"loss": 0.9252, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.928297755883963e-05, |
|
"loss": 0.8899, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.914614121510673e-05, |
|
"loss": 1.0344, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.900930487137384e-05, |
|
"loss": 1.008, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.8872468527640944e-05, |
|
"loss": 0.9935, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.8735632183908045e-05, |
|
"loss": 1.0432, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.8598795840175153e-05, |
|
"loss": 0.9235, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.8461959496442257e-05, |
|
"loss": 0.9564, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.8325123152709358e-05, |
|
"loss": 0.9873, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 2.8188286808976466e-05, |
|
"loss": 1.0216, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.8051450465243574e-05, |
|
"loss": 0.9, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.7914614121510675e-05, |
|
"loss": 0.9814, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 0.9785, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9139152981849611, |
|
"eval_loss": 0.4472927749156952, |
|
"eval_runtime": 27.773, |
|
"eval_samples_per_second": 208.296, |
|
"eval_steps_per_second": 6.517, |
|
"step": 2033 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.764094143404488e-05, |
|
"loss": 0.97, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.7504105090311988e-05, |
|
"loss": 0.9339, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.7367268746579096e-05, |
|
"loss": 0.8636, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.7230432402846197e-05, |
|
"loss": 0.8525, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 2.70935960591133e-05, |
|
"loss": 0.9281, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 2.695675971538041e-05, |
|
"loss": 0.9573, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 2.681992337164751e-05, |
|
"loss": 1.0129, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.6683087027914618e-05, |
|
"loss": 0.9568, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 2.6546250684181722e-05, |
|
"loss": 0.8739, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 2.6409414340448823e-05, |
|
"loss": 1.0142, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.627257799671593e-05, |
|
"loss": 0.8615, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.6135741652983032e-05, |
|
"loss": 0.9213, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 2.5998905309250136e-05, |
|
"loss": 0.9751, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.5862068965517244e-05, |
|
"loss": 0.9144, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.5725232621784345e-05, |
|
"loss": 0.8717, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.5588396278051453e-05, |
|
"loss": 1.0529, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.5451559934318557e-05, |
|
"loss": 0.9158, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.531472359058566e-05, |
|
"loss": 0.9584, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 2.5177887246852766e-05, |
|
"loss": 0.9806, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.5041050903119874e-05, |
|
"loss": 0.8941, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 2.4904214559386975e-05, |
|
"loss": 0.9625, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.476737821565408e-05, |
|
"loss": 0.9277, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 2.4630541871921184e-05, |
|
"loss": 0.9645, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.4493705528188288e-05, |
|
"loss": 0.9162, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 2.4356869184455393e-05, |
|
"loss": 0.8772, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 2.4220032840722497e-05, |
|
"loss": 0.9556, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 2.40831964969896e-05, |
|
"loss": 0.885, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.3946360153256706e-05, |
|
"loss": 0.7871, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.380952380952381e-05, |
|
"loss": 0.8639, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.3672687465790915e-05, |
|
"loss": 0.9446, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.353585112205802e-05, |
|
"loss": 0.8905, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.3399014778325123e-05, |
|
"loss": 0.9502, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.326217843459223e-05, |
|
"loss": 0.9842, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.3125342090859332e-05, |
|
"loss": 0.8827, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.2988505747126437e-05, |
|
"loss": 0.8861, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 2.285166940339354e-05, |
|
"loss": 0.9701, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 2.271483305966065e-05, |
|
"loss": 0.8456, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.2577996715927753e-05, |
|
"loss": 0.8858, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 2.2441160372194854e-05, |
|
"loss": 0.8431, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 2.230432402846196e-05, |
|
"loss": 0.9466, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 2.2167487684729066e-05, |
|
"loss": 0.9625, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.9199654278305963, |
|
"eval_loss": 0.3929709494113922, |
|
"eval_runtime": 27.3929, |
|
"eval_samples_per_second": 211.186, |
|
"eval_steps_per_second": 6.608, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 2.203065134099617e-05, |
|
"loss": 0.886, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 2.1893814997263272e-05, |
|
"loss": 0.8862, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 2.175697865353038e-05, |
|
"loss": 0.8938, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 2.1620142309797484e-05, |
|
"loss": 0.8165, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 2.148330596606459e-05, |
|
"loss": 0.8117, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 2.1346469622331693e-05, |
|
"loss": 0.8538, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 2.1209633278598797e-05, |
|
"loss": 0.8646, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 2.10727969348659e-05, |
|
"loss": 0.8506, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 2.0935960591133006e-05, |
|
"loss": 0.9332, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 2.079912424740011e-05, |
|
"loss": 0.7804, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 2.0662287903667215e-05, |
|
"loss": 0.8503, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 2.052545155993432e-05, |
|
"loss": 0.8818, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 2.0388615216201424e-05, |
|
"loss": 0.7721, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 2.025177887246853e-05, |
|
"loss": 0.8895, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 2.0114942528735632e-05, |
|
"loss": 0.8575, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.9978106185002737e-05, |
|
"loss": 0.8596, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.984126984126984e-05, |
|
"loss": 0.8394, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.970443349753695e-05, |
|
"loss": 0.8782, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.956759715380405e-05, |
|
"loss": 0.7369, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.9430760810071154e-05, |
|
"loss": 0.8684, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.929392446633826e-05, |
|
"loss": 0.8494, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.9157088122605367e-05, |
|
"loss": 0.764, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.9020251778872468e-05, |
|
"loss": 0.855, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.8883415435139572e-05, |
|
"loss": 0.8766, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.874657909140668e-05, |
|
"loss": 0.8737, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.8609742747673784e-05, |
|
"loss": 0.8441, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.847290640394089e-05, |
|
"loss": 0.8257, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.833607006020799e-05, |
|
"loss": 0.8367, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.8199233716475097e-05, |
|
"loss": 0.8218, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.8062397372742202e-05, |
|
"loss": 0.7645, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.7925561029009306e-05, |
|
"loss": 0.8305, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.7788724685276407e-05, |
|
"loss": 0.8306, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.7651888341543515e-05, |
|
"loss": 0.9052, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.751505199781062e-05, |
|
"loss": 0.7477, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.7378215654077724e-05, |
|
"loss": 0.8458, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.7241379310344828e-05, |
|
"loss": 0.901, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.7104542966611933e-05, |
|
"loss": 0.8307, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.6967706622879037e-05, |
|
"loss": 0.8201, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.683087027914614e-05, |
|
"loss": 0.8514, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.6694033935413246e-05, |
|
"loss": 0.8414, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9327571305099395, |
|
"eval_loss": 0.3537824749946594, |
|
"eval_runtime": 27.9648, |
|
"eval_samples_per_second": 206.867, |
|
"eval_steps_per_second": 6.472, |
|
"step": 2847 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.655719759168035e-05, |
|
"loss": 0.9236, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.6420361247947455e-05, |
|
"loss": 0.8118, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 1.628352490421456e-05, |
|
"loss": 0.8715, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 1.6146688560481667e-05, |
|
"loss": 0.8181, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 1.6009852216748768e-05, |
|
"loss": 0.8577, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 1.5873015873015872e-05, |
|
"loss": 0.7951, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 1.573617952928298e-05, |
|
"loss": 0.8414, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.5599343185550085e-05, |
|
"loss": 0.8405, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.5462506841817186e-05, |
|
"loss": 0.8375, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.532567049808429e-05, |
|
"loss": 0.8256, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 1.5188834154351398e-05, |
|
"loss": 0.9106, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 1.50519978106185e-05, |
|
"loss": 0.7944, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.4915161466885605e-05, |
|
"loss": 0.8647, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.4778325123152711e-05, |
|
"loss": 0.7921, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 1.4641488779419815e-05, |
|
"loss": 0.7978, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 1.450465243568692e-05, |
|
"loss": 0.8136, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 1.4367816091954022e-05, |
|
"loss": 0.8278, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 1.4230979748221129e-05, |
|
"loss": 0.8061, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 1.4094143404488233e-05, |
|
"loss": 0.7277, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.3957307060755337e-05, |
|
"loss": 0.809, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.382047071702244e-05, |
|
"loss": 0.8359, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.3683634373289548e-05, |
|
"loss": 0.8107, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.354679802955665e-05, |
|
"loss": 0.8016, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.3409961685823755e-05, |
|
"loss": 0.8512, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 1.3273125342090861e-05, |
|
"loss": 0.7948, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.3136288998357965e-05, |
|
"loss": 0.8081, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.2999452654625068e-05, |
|
"loss": 0.854, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 1.2862616310892173e-05, |
|
"loss": 0.8504, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 1.2725779967159279e-05, |
|
"loss": 0.824, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.2588943623426383e-05, |
|
"loss": 0.8119, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 1.2452107279693487e-05, |
|
"loss": 0.8304, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 1.2315270935960592e-05, |
|
"loss": 0.7737, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 1.2178434592227696e-05, |
|
"loss": 0.8032, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 1.20415982484948e-05, |
|
"loss": 0.8448, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.1904761904761905e-05, |
|
"loss": 0.755, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.176792556102901e-05, |
|
"loss": 0.812, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 1.1631089217296116e-05, |
|
"loss": 0.8194, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 1.1494252873563218e-05, |
|
"loss": 0.8348, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 1.1357416529830324e-05, |
|
"loss": 0.7492, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 1.1220580186097427e-05, |
|
"loss": 0.8227, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.1083743842364533e-05, |
|
"loss": 0.8336, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.9334485738980121, |
|
"eval_loss": 0.3332645893096924, |
|
"eval_runtime": 29.4073, |
|
"eval_samples_per_second": 196.72, |
|
"eval_steps_per_second": 6.155, |
|
"step": 3254 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 1.0946907498631636e-05, |
|
"loss": 0.7665, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 1.0810071154898742e-05, |
|
"loss": 0.807, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 1.0673234811165846e-05, |
|
"loss": 0.8521, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 1.053639846743295e-05, |
|
"loss": 0.7569, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 1.0399562123700055e-05, |
|
"loss": 0.7465, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 1.026272577996716e-05, |
|
"loss": 0.8432, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 1.0125889436234266e-05, |
|
"loss": 0.7659, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 9.989053092501368e-06, |
|
"loss": 0.7682, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 9.852216748768475e-06, |
|
"loss": 0.7285, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 9.715380405035577e-06, |
|
"loss": 0.8721, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 9.578544061302683e-06, |
|
"loss": 0.761, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 9.441707717569786e-06, |
|
"loss": 0.7812, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 9.304871373836892e-06, |
|
"loss": 0.8125, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 9.168035030103995e-06, |
|
"loss": 0.8344, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 9.031198686371101e-06, |
|
"loss": 0.7884, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 8.894362342638204e-06, |
|
"loss": 0.7692, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 8.75752599890531e-06, |
|
"loss": 0.875, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 8.620689655172414e-06, |
|
"loss": 0.7938, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 8.483853311439519e-06, |
|
"loss": 0.7749, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 8.347016967706623e-06, |
|
"loss": 0.7628, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 8.210180623973727e-06, |
|
"loss": 0.8007, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 8.073344280240833e-06, |
|
"loss": 0.7281, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 7.936507936507936e-06, |
|
"loss": 0.7633, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 7.799671592775042e-06, |
|
"loss": 0.8202, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 7.662835249042145e-06, |
|
"loss": 0.8123, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 7.52599890530925e-06, |
|
"loss": 0.8223, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 7.3891625615763555e-06, |
|
"loss": 0.8456, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 7.25232621784346e-06, |
|
"loss": 0.752, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 7.115489874110564e-06, |
|
"loss": 0.7942, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 6.978653530377669e-06, |
|
"loss": 0.7635, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 6.841817186644774e-06, |
|
"loss": 0.7014, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 6.7049808429118775e-06, |
|
"loss": 0.7572, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 6.568144499178983e-06, |
|
"loss": 0.7816, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 6.431308155446086e-06, |
|
"loss": 0.7336, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 6.2944718117131915e-06, |
|
"loss": 0.7665, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 6.157635467980296e-06, |
|
"loss": 0.7669, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 6.0207991242474e-06, |
|
"loss": 0.7587, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 5.883962780514505e-06, |
|
"loss": 0.8309, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 5.747126436781609e-06, |
|
"loss": 0.7201, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 5.6102900930487136e-06, |
|
"loss": 0.736, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 5.473453749315818e-06, |
|
"loss": 0.758, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.9405358686257562, |
|
"eval_loss": 0.3180586099624634, |
|
"eval_runtime": 28.3456, |
|
"eval_samples_per_second": 204.088, |
|
"eval_steps_per_second": 6.385, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 5.336617405582923e-06, |
|
"loss": 0.6856, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 5.199781061850028e-06, |
|
"loss": 0.8134, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 5.062944718117133e-06, |
|
"loss": 0.8841, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 4.926108374384237e-06, |
|
"loss": 0.7623, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 4.789272030651342e-06, |
|
"loss": 0.7488, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 4.652435686918446e-06, |
|
"loss": 0.7433, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 4.5155993431855505e-06, |
|
"loss": 0.7512, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 4.378762999452655e-06, |
|
"loss": 0.7725, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 4.241926655719759e-06, |
|
"loss": 0.7567, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 4.105090311986864e-06, |
|
"loss": 0.7213, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 3.968253968253968e-06, |
|
"loss": 0.75, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 3.8314176245210725e-06, |
|
"loss": 0.7999, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 3.6945812807881777e-06, |
|
"loss": 0.7489, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 3.557744937055282e-06, |
|
"loss": 0.7591, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 3.420908593322387e-06, |
|
"loss": 0.769, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 3.2840722495894914e-06, |
|
"loss": 0.7171, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 3.1472359058565958e-06, |
|
"loss": 0.7158, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 3.0103995621237e-06, |
|
"loss": 0.7132, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.8735632183908046e-06, |
|
"loss": 0.7236, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 2.736726874657909e-06, |
|
"loss": 0.7361, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 2.599890530925014e-06, |
|
"loss": 0.6774, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.4630541871921186e-06, |
|
"loss": 0.8082, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 2.326217843459223e-06, |
|
"loss": 0.776, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.1893814997263274e-06, |
|
"loss": 0.7655, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 2.052545155993432e-06, |
|
"loss": 0.7834, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 1.9157088122605362e-06, |
|
"loss": 0.7236, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 1.778872468527641e-06, |
|
"loss": 0.6974, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 1.6420361247947457e-06, |
|
"loss": 0.73, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 1.50519978106185e-06, |
|
"loss": 0.6768, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 1.3683634373289545e-06, |
|
"loss": 0.7841, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 1.2315270935960593e-06, |
|
"loss": 0.759, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 1.0946907498631637e-06, |
|
"loss": 0.7244, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 9.578544061302681e-07, |
|
"loss": 0.7326, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 8.210180623973728e-07, |
|
"loss": 0.7484, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 6.841817186644772e-07, |
|
"loss": 0.7589, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 5.473453749315819e-07, |
|
"loss": 0.7169, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 4.105090311986864e-07, |
|
"loss": 0.6762, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 2.7367268746579093e-07, |
|
"loss": 0.7896, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 1.3683634373289546e-07, |
|
"loss": 0.8115, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 0.0, |
|
"loss": 0.7343, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"eval_accuracy": 0.9403630077787382, |
|
"eval_loss": 0.31005963683128357, |
|
"eval_runtime": 28.1514, |
|
"eval_samples_per_second": 205.496, |
|
"eval_steps_per_second": 6.43, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"step": 4060, |
|
"total_flos": 1.3140619208067262e+19, |
|
"train_loss": 1.0548507860728673, |
|
"train_runtime": 4291.8856, |
|
"train_samples_per_second": 121.301, |
|
"train_steps_per_second": 0.946 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4060, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 1.3140619208067262e+19, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |