{
  "best_metric": 0.019963504746556282,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-Kontur-competition-52K/checkpoint-496",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1240,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 3.4303202629089355,
      "learning_rate": 4.032258064516129e-06,
      "loss": 0.7196,
      "step": 10
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.8987796306610107,
      "learning_rate": 8.064516129032258e-06,
      "loss": 0.6457,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.8838603496551514,
      "learning_rate": 1.2096774193548388e-05,
      "loss": 0.5219,
      "step": 30
    },
    {
      "epoch": 0.16,
      "grad_norm": 2.1233789920806885,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.4099,
      "step": 40
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.453838348388672,
      "learning_rate": 2.0161290322580645e-05,
      "loss": 0.2775,
      "step": 50
    },
    {
      "epoch": 0.24,
      "grad_norm": 5.15359354019165,
      "learning_rate": 2.4193548387096777e-05,
      "loss": 0.1655,
      "step": 60
    },
    {
      "epoch": 0.28,
      "grad_norm": 5.333171367645264,
      "learning_rate": 2.822580645161291e-05,
      "loss": 0.112,
      "step": 70
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.0966763496398926,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.0818,
      "step": 80
    },
    {
      "epoch": 0.36,
      "grad_norm": 13.956466674804688,
      "learning_rate": 3.6290322580645165e-05,
      "loss": 0.0884,
      "step": 90
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.109863758087158,
      "learning_rate": 4.032258064516129e-05,
      "loss": 0.0755,
      "step": 100
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.274998664855957,
      "learning_rate": 4.435483870967742e-05,
      "loss": 0.0562,
      "step": 110
    },
    {
      "epoch": 0.48,
      "grad_norm": 5.711202621459961,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 0.0622,
      "step": 120
    },
    {
      "epoch": 0.52,
      "grad_norm": 9.838350296020508,
      "learning_rate": 4.973118279569893e-05,
      "loss": 0.0646,
      "step": 130
    },
    {
      "epoch": 0.56,
      "grad_norm": 8.744378089904785,
      "learning_rate": 4.92831541218638e-05,
      "loss": 0.0631,
      "step": 140
    },
    {
      "epoch": 0.6,
      "grad_norm": 13.287856101989746,
      "learning_rate": 4.8835125448028677e-05,
      "loss": 0.0732,
      "step": 150
    },
    {
      "epoch": 0.65,
      "grad_norm": 4.635748863220215,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 0.049,
      "step": 160
    },
    {
      "epoch": 0.69,
      "grad_norm": 5.990776062011719,
      "learning_rate": 4.7939068100358424e-05,
      "loss": 0.0709,
      "step": 170
    },
    {
      "epoch": 0.73,
      "grad_norm": 6.079687595367432,
      "learning_rate": 4.74910394265233e-05,
      "loss": 0.0492,
      "step": 180
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.6072505712509155,
      "learning_rate": 4.704301075268818e-05,
      "loss": 0.0527,
      "step": 190
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.9706329107284546,
      "learning_rate": 4.659498207885305e-05,
      "loss": 0.0609,
      "step": 200
    },
    {
      "epoch": 0.85,
      "grad_norm": 5.284497261047363,
      "learning_rate": 4.614695340501792e-05,
      "loss": 0.0431,
      "step": 210
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.325084924697876,
      "learning_rate": 4.56989247311828e-05,
      "loss": 0.0321,
      "step": 220
    },
    {
      "epoch": 0.93,
      "grad_norm": 9.077991485595703,
      "learning_rate": 4.5250896057347674e-05,
      "loss": 0.0375,
      "step": 230
    },
    {
      "epoch": 0.97,
      "grad_norm": 4.708994388580322,
      "learning_rate": 4.4802867383512545e-05,
      "loss": 0.0376,
      "step": 240
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.06623569875955582,
      "eval_runtime": 113.2602,
      "eval_samples_per_second": 46.689,
      "eval_steps_per_second": 0.98,
      "step": 248
    },
    {
      "epoch": 1.01,
      "grad_norm": 4.394409656524658,
      "learning_rate": 4.435483870967742e-05,
      "loss": 0.0348,
      "step": 250
    },
    {
      "epoch": 1.05,
      "grad_norm": 20.027359008789062,
      "learning_rate": 4.390681003584229e-05,
      "loss": 0.0685,
      "step": 260
    },
    {
      "epoch": 1.09,
      "grad_norm": 2.6204428672790527,
      "learning_rate": 4.345878136200717e-05,
      "loss": 0.0445,
      "step": 270
    },
    {
      "epoch": 1.13,
      "grad_norm": 6.732935905456543,
      "learning_rate": 4.301075268817205e-05,
      "loss": 0.0357,
      "step": 280
    },
    {
      "epoch": 1.17,
      "grad_norm": 9.537686347961426,
      "learning_rate": 4.256272401433692e-05,
      "loss": 0.0367,
      "step": 290
    },
    {
      "epoch": 1.21,
      "grad_norm": 4.934083938598633,
      "learning_rate": 4.2114695340501795e-05,
      "loss": 0.0249,
      "step": 300
    },
    {
      "epoch": 1.25,
      "grad_norm": 2.7931134700775146,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.0312,
      "step": 310
    },
    {
      "epoch": 1.29,
      "grad_norm": 1.5536555051803589,
      "learning_rate": 4.121863799283154e-05,
      "loss": 0.0329,
      "step": 320
    },
    {
      "epoch": 1.33,
      "grad_norm": 4.139862060546875,
      "learning_rate": 4.077060931899642e-05,
      "loss": 0.0306,
      "step": 330
    },
    {
      "epoch": 1.37,
      "grad_norm": 2.881141185760498,
      "learning_rate": 4.032258064516129e-05,
      "loss": 0.0272,
      "step": 340
    },
    {
      "epoch": 1.41,
      "grad_norm": 4.688608646392822,
      "learning_rate": 3.987455197132617e-05,
      "loss": 0.0332,
      "step": 350
    },
    {
      "epoch": 1.45,
      "grad_norm": 3.6917848587036133,
      "learning_rate": 3.9426523297491045e-05,
      "loss": 0.0365,
      "step": 360
    },
    {
      "epoch": 1.49,
      "grad_norm": 3.620156764984131,
      "learning_rate": 3.8978494623655915e-05,
      "loss": 0.0232,
      "step": 370
    },
    {
      "epoch": 1.53,
      "grad_norm": 5.679781913757324,
      "learning_rate": 3.8530465949820786e-05,
      "loss": 0.0259,
      "step": 380
    },
    {
      "epoch": 1.57,
      "grad_norm": 3.670245885848999,
      "learning_rate": 3.808243727598566e-05,
      "loss": 0.0296,
      "step": 390
    },
    {
      "epoch": 1.61,
      "grad_norm": 5.259506702423096,
      "learning_rate": 3.763440860215054e-05,
      "loss": 0.0155,
      "step": 400
    },
    {
      "epoch": 1.65,
      "grad_norm": 2.921619176864624,
      "learning_rate": 3.718637992831541e-05,
      "loss": 0.0228,
      "step": 410
    },
    {
      "epoch": 1.69,
      "grad_norm": 1.523906946182251,
      "learning_rate": 3.673835125448029e-05,
      "loss": 0.0197,
      "step": 420
    },
    {
      "epoch": 1.73,
      "grad_norm": 4.975411891937256,
      "learning_rate": 3.6290322580645165e-05,
      "loss": 0.0296,
      "step": 430
    },
    {
      "epoch": 1.77,
      "grad_norm": 3.5619239807128906,
      "learning_rate": 3.5842293906810036e-05,
      "loss": 0.0265,
      "step": 440
    },
    {
      "epoch": 1.81,
      "grad_norm": 2.4044506549835205,
      "learning_rate": 3.539426523297491e-05,
      "loss": 0.0217,
      "step": 450
    },
    {
      "epoch": 1.85,
      "grad_norm": 8.599495887756348,
      "learning_rate": 3.494623655913979e-05,
      "loss": 0.034,
      "step": 460
    },
    {
      "epoch": 1.9,
      "grad_norm": 4.817391395568848,
      "learning_rate": 3.449820788530466e-05,
      "loss": 0.0244,
      "step": 470
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.9919489622116089,
      "learning_rate": 3.405017921146954e-05,
      "loss": 0.0285,
      "step": 480
    },
    {
      "epoch": 1.98,
      "grad_norm": 1.3462491035461426,
      "learning_rate": 3.360215053763441e-05,
      "loss": 0.0213,
      "step": 490
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.019963504746556282,
      "eval_runtime": 83.0975,
      "eval_samples_per_second": 63.636,
      "eval_steps_per_second": 1.336,
      "step": 496
    },
    {
      "epoch": 2.02,
      "grad_norm": 2.6078598499298096,
      "learning_rate": 3.3154121863799286e-05,
      "loss": 0.029,
      "step": 500
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.9603502750396729,
      "learning_rate": 3.270609318996416e-05,
      "loss": 0.0263,
      "step": 510
    },
    {
      "epoch": 2.1,
      "grad_norm": 11.797562599182129,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.0256,
      "step": 520
    },
    {
      "epoch": 2.14,
      "grad_norm": 6.010237693786621,
      "learning_rate": 3.1810035842293904e-05,
      "loss": 0.0259,
      "step": 530
    },
    {
      "epoch": 2.18,
      "grad_norm": 11.067808151245117,
      "learning_rate": 3.136200716845878e-05,
      "loss": 0.0244,
      "step": 540
    },
    {
      "epoch": 2.22,
      "grad_norm": 8.755197525024414,
      "learning_rate": 3.091397849462366e-05,
      "loss": 0.0172,
      "step": 550
    },
    {
      "epoch": 2.26,
      "grad_norm": 5.274145603179932,
      "learning_rate": 3.046594982078853e-05,
      "loss": 0.0219,
      "step": 560
    },
    {
      "epoch": 2.3,
      "grad_norm": 3.9025063514709473,
      "learning_rate": 3.0017921146953403e-05,
      "loss": 0.0121,
      "step": 570
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.16941958665847778,
      "learning_rate": 2.9569892473118284e-05,
      "loss": 0.0151,
      "step": 580
    },
    {
      "epoch": 2.38,
      "grad_norm": 5.620195388793945,
      "learning_rate": 2.9121863799283154e-05,
      "loss": 0.02,
      "step": 590
    },
    {
      "epoch": 2.42,
      "grad_norm": 4.685577869415283,
      "learning_rate": 2.8673835125448028e-05,
      "loss": 0.0212,
      "step": 600
    },
    {
      "epoch": 2.46,
      "grad_norm": 0.9184539914131165,
      "learning_rate": 2.822580645161291e-05,
      "loss": 0.0243,
      "step": 610
    },
    {
      "epoch": 2.5,
      "grad_norm": 4.498030662536621,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.0199,
      "step": 620
    },
    {
      "epoch": 2.54,
      "grad_norm": 0.5963709354400635,
      "learning_rate": 2.7329749103942653e-05,
      "loss": 0.0189,
      "step": 630
    },
    {
      "epoch": 2.58,
      "grad_norm": 0.7427639961242676,
      "learning_rate": 2.6881720430107527e-05,
      "loss": 0.01,
      "step": 640
    },
    {
      "epoch": 2.62,
      "grad_norm": 1.3474091291427612,
      "learning_rate": 2.6433691756272404e-05,
      "loss": 0.0121,
      "step": 650
    },
    {
      "epoch": 2.66,
      "grad_norm": 2.8091518878936768,
      "learning_rate": 2.5985663082437278e-05,
      "loss": 0.0252,
      "step": 660
    },
    {
      "epoch": 2.7,
      "grad_norm": 1.0539556741714478,
      "learning_rate": 2.5537634408602152e-05,
      "loss": 0.0192,
      "step": 670
    },
    {
      "epoch": 2.74,
      "grad_norm": 0.38558557629585266,
      "learning_rate": 2.5089605734767026e-05,
      "loss": 0.0115,
      "step": 680
    },
    {
      "epoch": 2.78,
      "grad_norm": 2.709695339202881,
      "learning_rate": 2.46415770609319e-05,
      "loss": 0.0178,
      "step": 690
    },
    {
      "epoch": 2.82,
      "grad_norm": 4.6164751052856445,
      "learning_rate": 2.4193548387096777e-05,
      "loss": 0.0118,
      "step": 700
    },
    {
      "epoch": 2.86,
      "grad_norm": 2.706479549407959,
      "learning_rate": 2.374551971326165e-05,
      "loss": 0.0165,
      "step": 710
    },
    {
      "epoch": 2.9,
      "grad_norm": 2.981581211090088,
      "learning_rate": 2.3297491039426525e-05,
      "loss": 0.0206,
      "step": 720
    },
    {
      "epoch": 2.94,
      "grad_norm": 1.4225656986236572,
      "learning_rate": 2.28494623655914e-05,
      "loss": 0.0092,
      "step": 730
    },
    {
      "epoch": 2.98,
      "grad_norm": 1.1643812656402588,
      "learning_rate": 2.2401433691756272e-05,
      "loss": 0.0094,
      "step": 740
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.13013091683387756,
      "eval_runtime": 83.9945,
      "eval_samples_per_second": 62.957,
      "eval_steps_per_second": 1.322,
      "step": 744
    },
    {
      "epoch": 3.02,
      "grad_norm": 2.4861252307891846,
      "learning_rate": 2.1953405017921146e-05,
      "loss": 0.0198,
      "step": 750
    },
    {
      "epoch": 3.06,
      "grad_norm": 2.1413233280181885,
      "learning_rate": 2.1505376344086024e-05,
      "loss": 0.0142,
      "step": 760
    },
    {
      "epoch": 3.1,
      "grad_norm": 2.2505719661712646,
      "learning_rate": 2.1057347670250897e-05,
      "loss": 0.0151,
      "step": 770
    },
    {
      "epoch": 3.15,
      "grad_norm": 2.7683489322662354,
      "learning_rate": 2.060931899641577e-05,
      "loss": 0.0202,
      "step": 780
    },
    {
      "epoch": 3.19,
      "grad_norm": 1.9615308046340942,
      "learning_rate": 2.0161290322580645e-05,
      "loss": 0.0076,
      "step": 790
    },
    {
      "epoch": 3.23,
      "grad_norm": 2.115872621536255,
      "learning_rate": 1.9713261648745522e-05,
      "loss": 0.0133,
      "step": 800
    },
    {
      "epoch": 3.27,
      "grad_norm": 0.3355218768119812,
      "learning_rate": 1.9265232974910393e-05,
      "loss": 0.0149,
      "step": 810
    },
    {
      "epoch": 3.31,
      "grad_norm": 1.101788878440857,
      "learning_rate": 1.881720430107527e-05,
      "loss": 0.0168,
      "step": 820
    },
    {
      "epoch": 3.35,
      "grad_norm": 4.441117763519287,
      "learning_rate": 1.8369175627240144e-05,
      "loss": 0.0101,
      "step": 830
    },
    {
      "epoch": 3.39,
      "grad_norm": 0.4963151216506958,
      "learning_rate": 1.7921146953405018e-05,
      "loss": 0.0079,
      "step": 840
    },
    {
      "epoch": 3.43,
      "grad_norm": 0.14219801127910614,
      "learning_rate": 1.7473118279569895e-05,
      "loss": 0.0117,
      "step": 850
    },
    {
      "epoch": 3.47,
      "grad_norm": 0.04577568918466568,
      "learning_rate": 1.702508960573477e-05,
      "loss": 0.0065,
      "step": 860
    },
    {
      "epoch": 3.51,
      "grad_norm": 3.448432207107544,
      "learning_rate": 1.6577060931899643e-05,
      "loss": 0.0141,
      "step": 870
    },
    {
      "epoch": 3.55,
      "grad_norm": 0.43813198804855347,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.0121,
      "step": 880
    },
    {
      "epoch": 3.59,
      "grad_norm": 1.8220750093460083,
      "learning_rate": 1.568100358422939e-05,
      "loss": 0.0057,
      "step": 890
    },
    {
      "epoch": 3.63,
      "grad_norm": 4.196650505065918,
      "learning_rate": 1.5232974910394265e-05,
      "loss": 0.0082,
      "step": 900
    },
    {
      "epoch": 3.67,
      "grad_norm": 1.0396842956542969,
      "learning_rate": 1.4784946236559142e-05,
      "loss": 0.0113,
      "step": 910
    },
    {
      "epoch": 3.71,
      "grad_norm": 3.6332926750183105,
      "learning_rate": 1.4336917562724014e-05,
      "loss": 0.0056,
      "step": 920
    },
    {
      "epoch": 3.75,
      "grad_norm": 6.7421369552612305,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.0085,
      "step": 930
    },
    {
      "epoch": 3.79,
      "grad_norm": 1.0675561428070068,
      "learning_rate": 1.3440860215053763e-05,
      "loss": 0.0061,
      "step": 940
    },
    {
      "epoch": 3.83,
      "grad_norm": 3.8124799728393555,
      "learning_rate": 1.2992831541218639e-05,
      "loss": 0.0087,
      "step": 950
    },
    {
      "epoch": 3.87,
      "grad_norm": 4.4822540283203125,
      "learning_rate": 1.2544802867383513e-05,
      "loss": 0.0048,
      "step": 960
    },
    {
      "epoch": 3.91,
      "grad_norm": 1.812424898147583,
      "learning_rate": 1.2096774193548388e-05,
      "loss": 0.0074,
      "step": 970
    },
    {
      "epoch": 3.95,
      "grad_norm": 6.007579803466797,
      "learning_rate": 1.1648745519713262e-05,
      "loss": 0.0116,
      "step": 980
    },
    {
      "epoch": 3.99,
      "grad_norm": 0.2030833214521408,
      "learning_rate": 1.1200716845878136e-05,
      "loss": 0.0103,
      "step": 990
    },
    {
      "epoch": 4.0,
      "eval_loss": 0.060427818447351456,
      "eval_runtime": 84.6044,
      "eval_samples_per_second": 62.503,
      "eval_steps_per_second": 1.312,
      "step": 992
    },
    {
      "epoch": 4.03,
      "grad_norm": 0.05641289800405502,
      "learning_rate": 1.0752688172043012e-05,
      "loss": 0.0085,
      "step": 1000
    },
    {
      "epoch": 4.07,
      "grad_norm": 0.805799126625061,
      "learning_rate": 1.0304659498207886e-05,
      "loss": 0.0077,
      "step": 1010
    },
    {
      "epoch": 4.11,
      "grad_norm": 4.972249984741211,
      "learning_rate": 9.856630824372761e-06,
      "loss": 0.0113,
      "step": 1020
    },
    {
      "epoch": 4.15,
      "grad_norm": 0.03549932688474655,
      "learning_rate": 9.408602150537635e-06,
      "loss": 0.0052,
      "step": 1030
    },
    {
      "epoch": 4.19,
      "grad_norm": 1.279232144355774,
      "learning_rate": 8.960573476702509e-06,
      "loss": 0.0077,
      "step": 1040
    },
    {
      "epoch": 4.23,
      "grad_norm": 0.22537721693515778,
      "learning_rate": 8.512544802867385e-06,
      "loss": 0.0082,
      "step": 1050
    },
    {
      "epoch": 4.27,
      "grad_norm": 1.7011988162994385,
      "learning_rate": 8.064516129032258e-06,
      "loss": 0.0036,
      "step": 1060
    },
    {
      "epoch": 4.31,
      "grad_norm": 4.37709379196167,
      "learning_rate": 7.616487455197132e-06,
      "loss": 0.0041,
      "step": 1070
    },
    {
      "epoch": 4.35,
      "grad_norm": 2.2503268718719482,
      "learning_rate": 7.168458781362007e-06,
      "loss": 0.0152,
      "step": 1080
    },
    {
      "epoch": 4.4,
      "grad_norm": 3.610405683517456,
      "learning_rate": 6.720430107526882e-06,
      "loss": 0.0107,
      "step": 1090
    },
    {
      "epoch": 4.44,
      "grad_norm": 0.22053413093090057,
      "learning_rate": 6.2724014336917564e-06,
      "loss": 0.0021,
      "step": 1100
    },
    {
      "epoch": 4.48,
      "grad_norm": 8.507258415222168,
      "learning_rate": 5.824372759856631e-06,
      "loss": 0.0141,
      "step": 1110
    },
    {
      "epoch": 4.52,
      "grad_norm": 5.034768581390381,
      "learning_rate": 5.376344086021506e-06,
      "loss": 0.0142,
      "step": 1120
    },
    {
      "epoch": 4.56,
      "grad_norm": 0.22189994156360626,
      "learning_rate": 4.928315412186381e-06,
      "loss": 0.0094,
      "step": 1130
    },
    {
      "epoch": 4.6,
      "grad_norm": 2.6591525077819824,
      "learning_rate": 4.4802867383512545e-06,
      "loss": 0.0039,
      "step": 1140
    },
    {
      "epoch": 4.64,
      "grad_norm": 4.526949405670166,
      "learning_rate": 4.032258064516129e-06,
      "loss": 0.0032,
      "step": 1150
    },
    {
      "epoch": 4.68,
      "grad_norm": 3.290435791015625,
      "learning_rate": 3.5842293906810035e-06,
      "loss": 0.0031,
      "step": 1160
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.6715773344039917,
      "learning_rate": 3.1362007168458782e-06,
      "loss": 0.0028,
      "step": 1170
    },
    {
      "epoch": 4.76,
      "grad_norm": 0.7959145903587341,
      "learning_rate": 2.688172043010753e-06,
      "loss": 0.0063,
      "step": 1180
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.10818018019199371,
      "learning_rate": 2.2401433691756272e-06,
      "loss": 0.0045,
      "step": 1190
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.03141075000166893,
      "learning_rate": 1.7921146953405017e-06,
      "loss": 0.0068,
      "step": 1200
    },
    {
      "epoch": 4.88,
      "grad_norm": 5.118274688720703,
      "learning_rate": 1.3440860215053765e-06,
      "loss": 0.0071,
      "step": 1210
    },
    {
      "epoch": 4.92,
      "grad_norm": 1.3246444463729858,
      "learning_rate": 8.960573476702509e-07,
      "loss": 0.0026,
      "step": 1220
    },
    {
      "epoch": 4.96,
      "grad_norm": 2.687753915786743,
      "learning_rate": 4.4802867383512544e-07,
      "loss": 0.0046,
      "step": 1230
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.166173055768013,
      "learning_rate": 0.0,
      "loss": 0.0073,
      "step": 1240
    },
    {
      "epoch": 5.0,
      "eval_loss": 0.07113554328680038,
      "eval_runtime": 85.5998,
      "eval_samples_per_second": 61.776,
      "eval_steps_per_second": 1.297,
      "step": 1240
    },
    {
      "epoch": 5.0,
      "step": 1240,
      "total_flos": 5.914482579184435e+18,
      "train_loss": 0.04452576920570385,
      "train_runtime": 5366.5614,
      "train_samples_per_second": 44.339,
      "train_steps_per_second": 0.231
    }
  ],
  "logging_steps": 10,
  "max_steps": 1240,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 5.914482579184435e+18,
  "train_batch_size": 48,
  "trial_name": null,
  "trial_params": null
}