|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9857954545454546, |
|
"eval_steps": 88, |
|
"global_step": 704, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.002840909090909091, |
|
"grad_norm": 2.8675832748413086, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.5046, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002840909090909091, |
|
"eval_loss": 0.3339672386646271, |
|
"eval_runtime": 320.8391, |
|
"eval_samples_per_second": 8.4, |
|
"eval_steps_per_second": 1.05, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.005681818181818182, |
|
"grad_norm": 3.076528310775757, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.5636, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008522727272727272, |
|
"grad_norm": 2.3580377101898193, |
|
"learning_rate": 6e-06, |
|
"loss": 0.2059, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.011363636363636364, |
|
"grad_norm": 1.2384682893753052, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.266, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.014204545454545454, |
|
"grad_norm": 0.8427222371101379, |
|
"learning_rate": 1e-05, |
|
"loss": 0.1559, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.017045454545454544, |
|
"grad_norm": 1.0242825746536255, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.3084, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.019886363636363636, |
|
"grad_norm": 0.6649225354194641, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.1229, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.022727272727272728, |
|
"grad_norm": 3.461799383163452, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.4072, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02556818181818182, |
|
"grad_norm": 1.2614651918411255, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.1717, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.028409090909090908, |
|
"grad_norm": 1.217345118522644, |
|
"learning_rate": 2e-05, |
|
"loss": 0.2597, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03125, |
|
"grad_norm": 0.8754523992538452, |
|
"learning_rate": 1.9999897541010772e-05, |
|
"loss": 0.1865, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03409090909090909, |
|
"grad_norm": 0.9129774570465088, |
|
"learning_rate": 1.9999590166142656e-05, |
|
"loss": 0.1859, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.036931818181818184, |
|
"grad_norm": 1.7369693517684937, |
|
"learning_rate": 1.999907788169431e-05, |
|
"loss": 0.188, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03977272727272727, |
|
"grad_norm": 1.3096760511398315, |
|
"learning_rate": 1.9998360698163375e-05, |
|
"loss": 0.3204, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04261363636363636, |
|
"grad_norm": 14.911248207092285, |
|
"learning_rate": 1.999743863024622e-05, |
|
"loss": 0.232, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.045454545454545456, |
|
"grad_norm": 1.7296037673950195, |
|
"learning_rate": 1.999631169683768e-05, |
|
"loss": 0.2511, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.048295454545454544, |
|
"grad_norm": 1.1196941137313843, |
|
"learning_rate": 1.999497992103064e-05, |
|
"loss": 0.2549, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05113636363636364, |
|
"grad_norm": 0.7868685126304626, |
|
"learning_rate": 1.9993443330115592e-05, |
|
"loss": 0.1295, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05397727272727273, |
|
"grad_norm": 1.1795912981033325, |
|
"learning_rate": 1.999170195558004e-05, |
|
"loss": 0.1546, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.056818181818181816, |
|
"grad_norm": 0.7677745223045349, |
|
"learning_rate": 1.9989755833107875e-05, |
|
"loss": 0.2035, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05965909090909091, |
|
"grad_norm": 1.0062402486801147, |
|
"learning_rate": 1.9987605002578655e-05, |
|
"loss": 0.2815, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.0625, |
|
"grad_norm": 0.7316843867301941, |
|
"learning_rate": 1.9985249508066754e-05, |
|
"loss": 0.1688, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06534090909090909, |
|
"grad_norm": 0.8864314556121826, |
|
"learning_rate": 1.9982689397840497e-05, |
|
"loss": 0.2197, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06818181818181818, |
|
"grad_norm": 0.7916576862335205, |
|
"learning_rate": 1.997992472436114e-05, |
|
"loss": 0.2421, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07102272727272728, |
|
"grad_norm": 0.6953737735748291, |
|
"learning_rate": 1.9976955544281815e-05, |
|
"loss": 0.1548, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07386363636363637, |
|
"grad_norm": 0.9317605495452881, |
|
"learning_rate": 1.9973781918446363e-05, |
|
"loss": 0.2112, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07670454545454546, |
|
"grad_norm": 1.0706557035446167, |
|
"learning_rate": 1.9970403911888077e-05, |
|
"loss": 0.3094, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.07954545454545454, |
|
"grad_norm": 1.016023874282837, |
|
"learning_rate": 1.9966821593828393e-05, |
|
"loss": 0.2843, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08238636363636363, |
|
"grad_norm": 0.7612195611000061, |
|
"learning_rate": 1.996303503767544e-05, |
|
"loss": 0.1433, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08522727272727272, |
|
"grad_norm": 0.7575203776359558, |
|
"learning_rate": 1.9959044321022563e-05, |
|
"loss": 0.1604, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08806818181818182, |
|
"grad_norm": 0.9231955409049988, |
|
"learning_rate": 1.9954849525646727e-05, |
|
"loss": 0.2021, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 0.4868467450141907, |
|
"learning_rate": 1.9950450737506825e-05, |
|
"loss": 0.0573, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09375, |
|
"grad_norm": 1.091597080230713, |
|
"learning_rate": 1.9945848046741934e-05, |
|
"loss": 0.2991, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.09659090909090909, |
|
"grad_norm": 0.8647293448448181, |
|
"learning_rate": 1.9941041547669467e-05, |
|
"loss": 0.2171, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.09943181818181818, |
|
"grad_norm": 0.48515236377716064, |
|
"learning_rate": 1.9936031338783226e-05, |
|
"loss": 0.0892, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10227272727272728, |
|
"grad_norm": 0.8053379654884338, |
|
"learning_rate": 1.9930817522751403e-05, |
|
"loss": 0.1805, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.10511363636363637, |
|
"grad_norm": 0.648684024810791, |
|
"learning_rate": 1.992540020641446e-05, |
|
"loss": 0.1416, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.10795454545454546, |
|
"grad_norm": 0.5282648801803589, |
|
"learning_rate": 1.991977950078295e-05, |
|
"loss": 0.1451, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11079545454545454, |
|
"grad_norm": 0.5510435104370117, |
|
"learning_rate": 1.9913955521035234e-05, |
|
"loss": 0.0875, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11363636363636363, |
|
"grad_norm": 0.7555186152458191, |
|
"learning_rate": 1.9907928386515126e-05, |
|
"loss": 0.0847, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11647727272727272, |
|
"grad_norm": 0.4893772006034851, |
|
"learning_rate": 1.9901698220729458e-05, |
|
"loss": 0.1075, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.11931818181818182, |
|
"grad_norm": 0.9237098097801208, |
|
"learning_rate": 1.9895265151345516e-05, |
|
"loss": 0.1608, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.12215909090909091, |
|
"grad_norm": 0.7260457277297974, |
|
"learning_rate": 1.9888629310188467e-05, |
|
"loss": 0.266, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.125, |
|
"grad_norm": 0.4444400668144226, |
|
"learning_rate": 1.9881790833238615e-05, |
|
"loss": 0.1077, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.1278409090909091, |
|
"grad_norm": 1.2695385217666626, |
|
"learning_rate": 1.9874749860628658e-05, |
|
"loss": 0.136, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.13068181818181818, |
|
"grad_norm": 0.8816536068916321, |
|
"learning_rate": 1.9867506536640782e-05, |
|
"loss": 0.0873, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.13352272727272727, |
|
"grad_norm": 0.5350353121757507, |
|
"learning_rate": 1.9860061009703714e-05, |
|
"loss": 0.139, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.13636363636363635, |
|
"grad_norm": 0.49625250697135925, |
|
"learning_rate": 1.9852413432389685e-05, |
|
"loss": 0.1205, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.13920454545454544, |
|
"grad_norm": 0.5370039343833923, |
|
"learning_rate": 1.9844563961411308e-05, |
|
"loss": 0.1075, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.14204545454545456, |
|
"grad_norm": 1.234710454940796, |
|
"learning_rate": 1.9836512757618355e-05, |
|
"loss": 0.2059, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14488636363636365, |
|
"grad_norm": 0.8026732206344604, |
|
"learning_rate": 1.9828259985994465e-05, |
|
"loss": 0.2543, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.14772727272727273, |
|
"grad_norm": 0.45312148332595825, |
|
"learning_rate": 1.9819805815653768e-05, |
|
"loss": 0.1001, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.15056818181818182, |
|
"grad_norm": 0.4792601466178894, |
|
"learning_rate": 1.981115041983741e-05, |
|
"loss": 0.1408, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.1534090909090909, |
|
"grad_norm": 0.40749409794807434, |
|
"learning_rate": 1.9802293975910016e-05, |
|
"loss": 0.0694, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.15625, |
|
"grad_norm": 0.5319134593009949, |
|
"learning_rate": 1.9793236665356043e-05, |
|
"loss": 0.1439, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1590909090909091, |
|
"grad_norm": 0.5237418413162231, |
|
"learning_rate": 1.9783978673776067e-05, |
|
"loss": 0.1198, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.16193181818181818, |
|
"grad_norm": 0.5154628157615662, |
|
"learning_rate": 1.9774520190882978e-05, |
|
"loss": 0.0596, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.16477272727272727, |
|
"grad_norm": 0.6916084885597229, |
|
"learning_rate": 1.97648614104981e-05, |
|
"loss": 0.1561, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.16761363636363635, |
|
"grad_norm": 0.8141054511070251, |
|
"learning_rate": 1.9755002530547206e-05, |
|
"loss": 0.2444, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.17045454545454544, |
|
"grad_norm": 0.324054479598999, |
|
"learning_rate": 1.974494375305647e-05, |
|
"loss": 0.0467, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17329545454545456, |
|
"grad_norm": 0.49560919404029846, |
|
"learning_rate": 1.973468528414833e-05, |
|
"loss": 0.1376, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.17613636363636365, |
|
"grad_norm": 0.4895784854888916, |
|
"learning_rate": 1.9724227334037255e-05, |
|
"loss": 0.1201, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.17897727272727273, |
|
"grad_norm": 0.47013720870018005, |
|
"learning_rate": 1.9713570117025444e-05, |
|
"loss": 0.1037, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.618483304977417, |
|
"learning_rate": 1.9702713851498436e-05, |
|
"loss": 0.1531, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.1846590909090909, |
|
"grad_norm": 0.5431604385375977, |
|
"learning_rate": 1.9691658759920625e-05, |
|
"loss": 0.1495, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.1875, |
|
"grad_norm": 0.6768195033073425, |
|
"learning_rate": 1.9680405068830716e-05, |
|
"loss": 0.1585, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1903409090909091, |
|
"grad_norm": 0.5289143919944763, |
|
"learning_rate": 1.9668953008837073e-05, |
|
"loss": 0.1082, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.19318181818181818, |
|
"grad_norm": 0.38905492424964905, |
|
"learning_rate": 1.965730281461299e-05, |
|
"loss": 0.0742, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.19602272727272727, |
|
"grad_norm": 0.6071842908859253, |
|
"learning_rate": 1.96454547248919e-05, |
|
"loss": 0.1822, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.19886363636363635, |
|
"grad_norm": 0.6174128651618958, |
|
"learning_rate": 1.9633408982462453e-05, |
|
"loss": 0.2036, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.20170454545454544, |
|
"grad_norm": 0.4610503613948822, |
|
"learning_rate": 1.962116583416357e-05, |
|
"loss": 0.0834, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.20454545454545456, |
|
"grad_norm": 0.5412945747375488, |
|
"learning_rate": 1.9608725530879376e-05, |
|
"loss": 0.1061, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.20738636363636365, |
|
"grad_norm": 0.2720443606376648, |
|
"learning_rate": 1.9596088327534045e-05, |
|
"loss": 0.0572, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.21022727272727273, |
|
"grad_norm": 0.3362009525299072, |
|
"learning_rate": 1.95832544830866e-05, |
|
"loss": 0.0596, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.21306818181818182, |
|
"grad_norm": 0.4517727494239807, |
|
"learning_rate": 1.957022426052558e-05, |
|
"loss": 0.0983, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2159090909090909, |
|
"grad_norm": 0.44597986340522766, |
|
"learning_rate": 1.9556997926863676e-05, |
|
"loss": 0.1082, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.21875, |
|
"grad_norm": 0.4495086371898651, |
|
"learning_rate": 1.9543575753132242e-05, |
|
"loss": 0.0877, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.2215909090909091, |
|
"grad_norm": 0.4055721163749695, |
|
"learning_rate": 1.9529958014375748e-05, |
|
"loss": 0.1046, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.22443181818181818, |
|
"grad_norm": 0.5845265984535217, |
|
"learning_rate": 1.9516144989646145e-05, |
|
"loss": 0.1601, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.22727272727272727, |
|
"grad_norm": 0.5843460559844971, |
|
"learning_rate": 1.9502136961997144e-05, |
|
"loss": 0.1922, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.23011363636363635, |
|
"grad_norm": 0.4905230402946472, |
|
"learning_rate": 1.9487934218478413e-05, |
|
"loss": 0.1149, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.23295454545454544, |
|
"grad_norm": 0.760354220867157, |
|
"learning_rate": 1.9473537050129704e-05, |
|
"loss": 0.264, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.23579545454545456, |
|
"grad_norm": 0.5880870819091797, |
|
"learning_rate": 1.945894575197488e-05, |
|
"loss": 0.0957, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.23863636363636365, |
|
"grad_norm": 0.7324073314666748, |
|
"learning_rate": 1.9444160623015877e-05, |
|
"loss": 0.1335, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.24147727272727273, |
|
"grad_norm": 0.57627272605896, |
|
"learning_rate": 1.9429181966226558e-05, |
|
"loss": 0.1352, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.24431818181818182, |
|
"grad_norm": 0.41611605882644653, |
|
"learning_rate": 1.9414010088546535e-05, |
|
"loss": 0.0918, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.2471590909090909, |
|
"grad_norm": 0.5241921544075012, |
|
"learning_rate": 1.9398645300874865e-05, |
|
"loss": 0.1096, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.45845428109169006, |
|
"learning_rate": 1.9383087918063662e-05, |
|
"loss": 0.139, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_loss": 0.11835308372974396, |
|
"eval_runtime": 324.452, |
|
"eval_samples_per_second": 8.306, |
|
"eval_steps_per_second": 1.039, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2528409090909091, |
|
"grad_norm": 0.3612334728240967, |
|
"learning_rate": 1.9367338258911674e-05, |
|
"loss": 0.0563, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.2556818181818182, |
|
"grad_norm": 0.7781359553337097, |
|
"learning_rate": 1.935139664615773e-05, |
|
"loss": 0.1679, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2585227272727273, |
|
"grad_norm": 0.41035768389701843, |
|
"learning_rate": 1.933526340647414e-05, |
|
"loss": 0.0654, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.26136363636363635, |
|
"grad_norm": 0.422101229429245, |
|
"learning_rate": 1.9318938870459984e-05, |
|
"loss": 0.0748, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.26420454545454547, |
|
"grad_norm": 0.33451277017593384, |
|
"learning_rate": 1.930242337263436e-05, |
|
"loss": 0.0502, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.26704545454545453, |
|
"grad_norm": 0.5042566061019897, |
|
"learning_rate": 1.9285717251429507e-05, |
|
"loss": 0.0972, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.26988636363636365, |
|
"grad_norm": 0.3712761402130127, |
|
"learning_rate": 1.9268820849183885e-05, |
|
"loss": 0.0804, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 0.6084650754928589, |
|
"learning_rate": 1.9251734512135155e-05, |
|
"loss": 0.1383, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2755681818181818, |
|
"grad_norm": 0.5942140817642212, |
|
"learning_rate": 1.9234458590413076e-05, |
|
"loss": 0.2123, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.2784090909090909, |
|
"grad_norm": 0.4847292900085449, |
|
"learning_rate": 1.921699343803235e-05, |
|
"loss": 0.0427, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.28125, |
|
"grad_norm": 0.6673583388328552, |
|
"learning_rate": 1.9199339412885347e-05, |
|
"loss": 0.2334, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2840909090909091, |
|
"grad_norm": 0.5910840034484863, |
|
"learning_rate": 1.918149687673478e-05, |
|
"loss": 0.1798, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.2869318181818182, |
|
"grad_norm": 0.4884462356567383, |
|
"learning_rate": 1.9163466195206292e-05, |
|
"loss": 0.0858, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.2897727272727273, |
|
"grad_norm": 0.45178908109664917, |
|
"learning_rate": 1.9145247737780963e-05, |
|
"loss": 0.0957, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.29261363636363635, |
|
"grad_norm": 0.6220026016235352, |
|
"learning_rate": 1.9126841877787745e-05, |
|
"loss": 0.215, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.29545454545454547, |
|
"grad_norm": 0.4645046591758728, |
|
"learning_rate": 1.9108248992395797e-05, |
|
"loss": 0.1185, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.29829545454545453, |
|
"grad_norm": 0.4531736373901367, |
|
"learning_rate": 1.9089469462606766e-05, |
|
"loss": 0.1177, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.30113636363636365, |
|
"grad_norm": 0.5413585901260376, |
|
"learning_rate": 1.9070503673246982e-05, |
|
"loss": 0.1, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3039772727272727, |
|
"grad_norm": 0.3162194490432739, |
|
"learning_rate": 1.905135201295957e-05, |
|
"loss": 0.046, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.3068181818181818, |
|
"grad_norm": 0.41331952810287476, |
|
"learning_rate": 1.9032014874196476e-05, |
|
"loss": 0.0757, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3096590909090909, |
|
"grad_norm": 0.41365161538124084, |
|
"learning_rate": 1.901249265321044e-05, |
|
"loss": 0.0719, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.3125, |
|
"grad_norm": 0.762642502784729, |
|
"learning_rate": 1.8992785750046866e-05, |
|
"loss": 0.3067, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3153409090909091, |
|
"grad_norm": 0.5370594263076782, |
|
"learning_rate": 1.8972894568535632e-05, |
|
"loss": 0.1088, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3181818181818182, |
|
"grad_norm": 0.6596404910087585, |
|
"learning_rate": 1.895281951628281e-05, |
|
"loss": 0.1886, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3210227272727273, |
|
"grad_norm": 0.4068351984024048, |
|
"learning_rate": 1.8932561004662312e-05, |
|
"loss": 0.0762, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.32386363636363635, |
|
"grad_norm": 0.6985118389129639, |
|
"learning_rate": 1.891211944880746e-05, |
|
"loss": 0.2173, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.32670454545454547, |
|
"grad_norm": 0.24498817324638367, |
|
"learning_rate": 1.8891495267602482e-05, |
|
"loss": 0.0333, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.32954545454545453, |
|
"grad_norm": 0.6893118023872375, |
|
"learning_rate": 1.887068888367394e-05, |
|
"loss": 0.2257, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.33238636363636365, |
|
"grad_norm": 0.5168837904930115, |
|
"learning_rate": 1.8849700723382038e-05, |
|
"loss": 0.1399, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.3352272727272727, |
|
"grad_norm": 0.5254336595535278, |
|
"learning_rate": 1.8828531216811912e-05, |
|
"loss": 0.0815, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.3380681818181818, |
|
"grad_norm": 0.44607385993003845, |
|
"learning_rate": 1.8807180797764822e-05, |
|
"loss": 0.0736, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3409090909090909, |
|
"grad_norm": 0.5060997605323792, |
|
"learning_rate": 1.8785649903749236e-05, |
|
"loss": 0.1319, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.34375, |
|
"grad_norm": 0.37058401107788086, |
|
"learning_rate": 1.8763938975971873e-05, |
|
"loss": 0.0597, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3465909090909091, |
|
"grad_norm": 0.861648440361023, |
|
"learning_rate": 1.8742048459328684e-05, |
|
"loss": 0.2988, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3494318181818182, |
|
"grad_norm": 0.48567265272140503, |
|
"learning_rate": 1.8719978802395707e-05, |
|
"loss": 0.0934, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3522727272727273, |
|
"grad_norm": 0.3372555673122406, |
|
"learning_rate": 1.8697730457419893e-05, |
|
"loss": 0.0523, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.35511363636363635, |
|
"grad_norm": 0.42780211567878723, |
|
"learning_rate": 1.8675303880309828e-05, |
|
"loss": 0.0615, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.35795454545454547, |
|
"grad_norm": 0.601543128490448, |
|
"learning_rate": 1.8652699530626398e-05, |
|
"loss": 0.2145, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.36079545454545453, |
|
"grad_norm": 0.42834290862083435, |
|
"learning_rate": 1.8629917871573368e-05, |
|
"loss": 0.0707, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.3425846993923187, |
|
"learning_rate": 1.8606959369987885e-05, |
|
"loss": 0.06, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.3664772727272727, |
|
"grad_norm": 0.2861498296260834, |
|
"learning_rate": 1.8583824496330925e-05, |
|
"loss": 0.0591, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.3693181818181818, |
|
"grad_norm": 0.8360418677330017, |
|
"learning_rate": 1.8560513724677644e-05, |
|
"loss": 0.3178, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3721590909090909, |
|
"grad_norm": 0.5859141945838928, |
|
"learning_rate": 1.853702753270766e-05, |
|
"loss": 0.173, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.375, |
|
"grad_norm": 0.3947351276874542, |
|
"learning_rate": 1.8513366401695277e-05, |
|
"loss": 0.0649, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.3778409090909091, |
|
"grad_norm": 0.44475170969963074, |
|
"learning_rate": 1.8489530816499597e-05, |
|
"loss": 0.0705, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.3806818181818182, |
|
"grad_norm": 0.40042588114738464, |
|
"learning_rate": 1.8465521265554623e-05, |
|
"loss": 0.053, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.3835227272727273, |
|
"grad_norm": 0.32334399223327637, |
|
"learning_rate": 1.8441338240859216e-05, |
|
"loss": 0.0741, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.38636363636363635, |
|
"grad_norm": 0.4624631106853485, |
|
"learning_rate": 1.841698223796703e-05, |
|
"loss": 0.084, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.38920454545454547, |
|
"grad_norm": 0.5654751062393188, |
|
"learning_rate": 1.839245375597635e-05, |
|
"loss": 0.1694, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.39204545454545453, |
|
"grad_norm": 0.5055428147315979, |
|
"learning_rate": 1.8367753297519874e-05, |
|
"loss": 0.1569, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.39488636363636365, |
|
"grad_norm": 0.5599436163902283, |
|
"learning_rate": 1.8342881368754403e-05, |
|
"loss": 0.2114, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.3977272727272727, |
|
"grad_norm": 0.6785524487495422, |
|
"learning_rate": 1.8317838479350473e-05, |
|
"loss": 0.2652, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4005681818181818, |
|
"grad_norm": 0.22756998240947723, |
|
"learning_rate": 1.829262514248191e-05, |
|
"loss": 0.0325, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4034090909090909, |
|
"grad_norm": 0.3240101635456085, |
|
"learning_rate": 1.8267241874815312e-05, |
|
"loss": 0.0677, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.40625, |
|
"grad_norm": 0.5141014456748962, |
|
"learning_rate": 1.8241689196499475e-05, |
|
"loss": 0.0904, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4090909090909091, |
|
"grad_norm": 0.4461572766304016, |
|
"learning_rate": 1.8215967631154718e-05, |
|
"loss": 0.068, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4119318181818182, |
|
"grad_norm": 0.5313737988471985, |
|
"learning_rate": 1.8190077705862155e-05, |
|
"loss": 0.0917, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4147727272727273, |
|
"grad_norm": 0.6240700483322144, |
|
"learning_rate": 1.8164019951152903e-05, |
|
"loss": 0.1616, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.41761363636363635, |
|
"grad_norm": 0.6356779932975769, |
|
"learning_rate": 1.8137794900997204e-05, |
|
"loss": 0.2113, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.42045454545454547, |
|
"grad_norm": 0.5394648909568787, |
|
"learning_rate": 1.811140309279348e-05, |
|
"loss": 0.0956, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.42329545454545453, |
|
"grad_norm": 0.5533218383789062, |
|
"learning_rate": 1.8084845067357334e-05, |
|
"loss": 0.1166, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.42613636363636365, |
|
"grad_norm": 0.5690510869026184, |
|
"learning_rate": 1.805812136891046e-05, |
|
"loss": 0.1485, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4289772727272727, |
|
"grad_norm": 0.4042081832885742, |
|
"learning_rate": 1.8031232545069468e-05, |
|
"loss": 0.0855, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4318181818181818, |
|
"grad_norm": 0.6914238929748535, |
|
"learning_rate": 1.8004179146834712e-05, |
|
"loss": 0.1606, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.4346590909090909, |
|
"grad_norm": 0.2679579555988312, |
|
"learning_rate": 1.7976961728578965e-05, |
|
"loss": 0.0242, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.4375, |
|
"grad_norm": 0.8089874386787415, |
|
"learning_rate": 1.7949580848036045e-05, |
|
"loss": 0.147, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.4403409090909091, |
|
"grad_norm": 0.5541207194328308, |
|
"learning_rate": 1.7922037066289432e-05, |
|
"loss": 0.0898, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.4431818181818182, |
|
"grad_norm": 0.451183557510376, |
|
"learning_rate": 1.7894330947760728e-05, |
|
"loss": 0.1032, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.4460227272727273, |
|
"grad_norm": 0.4263628423213959, |
|
"learning_rate": 1.7866463060198114e-05, |
|
"loss": 0.0744, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.44886363636363635, |
|
"grad_norm": 0.3324938714504242, |
|
"learning_rate": 1.7838433974664714e-05, |
|
"loss": 0.0737, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.45170454545454547, |
|
"grad_norm": 0.4336640238761902, |
|
"learning_rate": 1.7810244265526873e-05, |
|
"loss": 0.0697, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 0.46159589290618896, |
|
"learning_rate": 1.778189451044242e-05, |
|
"loss": 0.1031, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.45738636363636365, |
|
"grad_norm": 0.22606071829795837, |
|
"learning_rate": 1.77533852903488e-05, |
|
"loss": 0.0445, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4602272727272727, |
|
"grad_norm": 0.468505322933197, |
|
"learning_rate": 1.772471718945119e-05, |
|
"loss": 0.1115, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4630681818181818, |
|
"grad_norm": 0.5148159861564636, |
|
"learning_rate": 1.7695890795210517e-05, |
|
"loss": 0.1074, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4659090909090909, |
|
"grad_norm": 0.46925628185272217, |
|
"learning_rate": 1.7666906698331426e-05, |
|
"loss": 0.105, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.46875, |
|
"grad_norm": 0.9212394952774048, |
|
"learning_rate": 1.7637765492750172e-05, |
|
"loss": 0.226, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4715909090909091, |
|
"grad_norm": 0.3673381209373474, |
|
"learning_rate": 1.7608467775622448e-05, |
|
"loss": 0.118, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.4744318181818182, |
|
"grad_norm": 0.5593389868736267, |
|
"learning_rate": 1.757901414731115e-05, |
|
"loss": 0.1513, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.4772727272727273, |
|
"grad_norm": 0.3432629704475403, |
|
"learning_rate": 1.7549405211374072e-05, |
|
"loss": 0.0576, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.48011363636363635, |
|
"grad_norm": 0.325641006231308, |
|
"learning_rate": 1.7519641574551548e-05, |
|
"loss": 0.061, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.48295454545454547, |
|
"grad_norm": 0.49900880455970764, |
|
"learning_rate": 1.7489723846754e-05, |
|
"loss": 0.0929, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.48579545454545453, |
|
"grad_norm": 0.4341491162776947, |
|
"learning_rate": 1.7459652641049473e-05, |
|
"loss": 0.0941, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.48863636363636365, |
|
"grad_norm": 0.48210591077804565, |
|
"learning_rate": 1.7429428573651023e-05, |
|
"loss": 0.0543, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.4914772727272727, |
|
"grad_norm": 0.5630267262458801, |
|
"learning_rate": 1.739905226390413e-05, |
|
"loss": 0.1008, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.4943181818181818, |
|
"grad_norm": 0.6051055192947388, |
|
"learning_rate": 1.7368524334273998e-05, |
|
"loss": 0.1985, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.4971590909090909, |
|
"grad_norm": 0.28148218989372253, |
|
"learning_rate": 1.7337845410332784e-05, |
|
"loss": 0.0499, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.6560457348823547, |
|
"learning_rate": 1.73070161207468e-05, |
|
"loss": 0.1212, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.0832223892211914, |
|
"eval_runtime": 325.0904, |
|
"eval_samples_per_second": 8.29, |
|
"eval_steps_per_second": 1.037, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5028409090909091, |
|
"grad_norm": 0.6473656296730042, |
|
"learning_rate": 1.727603709726361e-05, |
|
"loss": 0.0804, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5056818181818182, |
|
"grad_norm": 0.34419476985931396, |
|
"learning_rate": 1.7244908974699112e-05, |
|
"loss": 0.0415, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.5085227272727273, |
|
"grad_norm": 0.3303261697292328, |
|
"learning_rate": 1.7213632390924487e-05, |
|
"loss": 0.0659, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5113636363636364, |
|
"grad_norm": 0.32229432463645935, |
|
"learning_rate": 1.7182207986853176e-05, |
|
"loss": 0.0973, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5142045454545454, |
|
"grad_norm": 0.44095346331596375, |
|
"learning_rate": 1.7150636406427713e-05, |
|
"loss": 0.0329, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5170454545454546, |
|
"grad_norm": 0.43706265091896057, |
|
"learning_rate": 1.7118918296606536e-05, |
|
"loss": 0.0854, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5198863636363636, |
|
"grad_norm": 0.32199347019195557, |
|
"learning_rate": 1.708705430735075e-05, |
|
"loss": 0.0327, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5227272727272727, |
|
"grad_norm": 0.30175983905792236, |
|
"learning_rate": 1.7055045091610772e-05, |
|
"loss": 0.0622, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5255681818181818, |
|
"grad_norm": 0.4883711338043213, |
|
"learning_rate": 1.7022891305312987e-05, |
|
"loss": 0.0977, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5284090909090909, |
|
"grad_norm": 0.6222572922706604, |
|
"learning_rate": 1.6990593607346276e-05, |
|
"loss": 0.1161, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.53125, |
|
"grad_norm": 0.415580153465271, |
|
"learning_rate": 1.695815265954855e-05, |
|
"loss": 0.0576, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5340909090909091, |
|
"grad_norm": 0.3268929421901703, |
|
"learning_rate": 1.6925569126693135e-05, |
|
"loss": 0.0627, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5369318181818182, |
|
"grad_norm": 0.41048845648765564, |
|
"learning_rate": 1.6892843676475212e-05, |
|
"loss": 0.0333, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5397727272727273, |
|
"grad_norm": 0.414117693901062, |
|
"learning_rate": 1.685997697949809e-05, |
|
"loss": 0.1126, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5426136363636364, |
|
"grad_norm": 0.2861213982105255, |
|
"learning_rate": 1.6826969709259476e-05, |
|
"loss": 0.0293, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.55129075050354, |
|
"learning_rate": 1.6793822542137683e-05, |
|
"loss": 0.136, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5482954545454546, |
|
"grad_norm": 0.32792291045188904, |
|
"learning_rate": 1.6760536157377754e-05, |
|
"loss": 0.0898, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5511363636363636, |
|
"grad_norm": 0.5784844756126404, |
|
"learning_rate": 1.672711123707756e-05, |
|
"loss": 0.0997, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5539772727272727, |
|
"grad_norm": 0.9223653674125671, |
|
"learning_rate": 1.6693548466173812e-05, |
|
"loss": 0.2186, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5568181818181818, |
|
"grad_norm": 0.44497421383857727, |
|
"learning_rate": 1.665984853242802e-05, |
|
"loss": 0.1132, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5596590909090909, |
|
"grad_norm": 0.3333360552787781, |
|
"learning_rate": 1.662601212641242e-05, |
|
"loss": 0.0383, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.5625, |
|
"grad_norm": 0.5328719615936279, |
|
"learning_rate": 1.6592039941495803e-05, |
|
"loss": 0.0994, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5653409090909091, |
|
"grad_norm": 0.28615421056747437, |
|
"learning_rate": 1.6557932673829313e-05, |
|
"loss": 0.0417, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.5681818181818182, |
|
"grad_norm": 0.314860463142395, |
|
"learning_rate": 1.6523691022332184e-05, |
|
"loss": 0.07, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5710227272727273, |
|
"grad_norm": 0.44072073698043823, |
|
"learning_rate": 1.6489315688677416e-05, |
|
"loss": 0.0709, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5738636363636364, |
|
"grad_norm": 0.5856392979621887, |
|
"learning_rate": 1.64548073772774e-05, |
|
"loss": 0.1405, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.5767045454545454, |
|
"grad_norm": 0.36462002992630005, |
|
"learning_rate": 1.6420166795269476e-05, |
|
"loss": 0.0441, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5795454545454546, |
|
"grad_norm": 0.6672521233558655, |
|
"learning_rate": 1.6385394652501444e-05, |
|
"loss": 0.1754, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5823863636363636, |
|
"grad_norm": 0.35214805603027344, |
|
"learning_rate": 1.6350491661517032e-05, |
|
"loss": 0.0583, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.5852272727272727, |
|
"grad_norm": 0.3882180452346802, |
|
"learning_rate": 1.6315458537541272e-05, |
|
"loss": 0.0775, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.5880681818181818, |
|
"grad_norm": 0.39624354243278503, |
|
"learning_rate": 1.628029599846585e-05, |
|
"loss": 0.0779, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.5909090909090909, |
|
"grad_norm": 0.5808979272842407, |
|
"learning_rate": 1.6245004764834423e-05, |
|
"loss": 0.0635, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.59375, |
|
"grad_norm": 0.23519928753376007, |
|
"learning_rate": 1.6209585559827806e-05, |
|
"loss": 0.037, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.5965909090909091, |
|
"grad_norm": 0.40159645676612854, |
|
"learning_rate": 1.617403910924919e-05, |
|
"loss": 0.0582, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.5994318181818182, |
|
"grad_norm": 0.38090643286705017, |
|
"learning_rate": 1.613836614150926e-05, |
|
"loss": 0.0981, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6022727272727273, |
|
"grad_norm": 0.44124341011047363, |
|
"learning_rate": 1.610256738761125e-05, |
|
"loss": 0.0867, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6051136363636364, |
|
"grad_norm": 0.3115885257720947, |
|
"learning_rate": 1.6066643581135994e-05, |
|
"loss": 0.0534, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6079545454545454, |
|
"grad_norm": 0.47533664107322693, |
|
"learning_rate": 1.6030595458226872e-05, |
|
"loss": 0.099, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6107954545454546, |
|
"grad_norm": 0.48215967416763306, |
|
"learning_rate": 1.599442375757473e-05, |
|
"loss": 0.0691, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6136363636363636, |
|
"grad_norm": 0.46541547775268555, |
|
"learning_rate": 1.5958129220402745e-05, |
|
"loss": 0.0872, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6164772727272727, |
|
"grad_norm": 0.5003085136413574, |
|
"learning_rate": 1.5921712590451238e-05, |
|
"loss": 0.1414, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6193181818181818, |
|
"grad_norm": 0.28242507576942444, |
|
"learning_rate": 1.5885174613962427e-05, |
|
"loss": 0.0332, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6221590909090909, |
|
"grad_norm": 0.574775218963623, |
|
"learning_rate": 1.584851603966514e-05, |
|
"loss": 0.1244, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 0.5019505620002747, |
|
"learning_rate": 1.581173761875947e-05, |
|
"loss": 0.0896, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6278409090909091, |
|
"grad_norm": 0.34306174516677856, |
|
"learning_rate": 1.5774840104901378e-05, |
|
"loss": 0.0771, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6306818181818182, |
|
"grad_norm": 0.2710534334182739, |
|
"learning_rate": 1.5737824254187275e-05, |
|
"loss": 0.0272, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6335227272727273, |
|
"grad_norm": 0.43892914056777954, |
|
"learning_rate": 1.5700690825138473e-05, |
|
"loss": 0.1656, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.36996838450431824, |
|
"learning_rate": 1.5663440578685706e-05, |
|
"loss": 0.0549, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6392045454545454, |
|
"grad_norm": 0.33289599418640137, |
|
"learning_rate": 1.5626074278153485e-05, |
|
"loss": 0.0536, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6420454545454546, |
|
"grad_norm": 0.25877413153648376, |
|
"learning_rate": 1.5588592689244492e-05, |
|
"loss": 0.0534, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6448863636363636, |
|
"grad_norm": 0.402020663022995, |
|
"learning_rate": 1.555099658002387e-05, |
|
"loss": 0.0455, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6477272727272727, |
|
"grad_norm": 0.5195022821426392, |
|
"learning_rate": 1.5513286720903488e-05, |
|
"loss": 0.0739, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6505681818181818, |
|
"grad_norm": 0.4449024200439453, |
|
"learning_rate": 1.5475463884626153e-05, |
|
"loss": 0.0849, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.6534090909090909, |
|
"grad_norm": 0.473652184009552, |
|
"learning_rate": 1.5437528846249783e-05, |
|
"loss": 0.1307, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.65625, |
|
"grad_norm": 0.43150216341018677, |
|
"learning_rate": 1.5399482383131516e-05, |
|
"loss": 0.0455, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6590909090909091, |
|
"grad_norm": 0.7418766021728516, |
|
"learning_rate": 1.536132527491178e-05, |
|
"loss": 0.143, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6619318181818182, |
|
"grad_norm": 0.3717086911201477, |
|
"learning_rate": 1.5323058303498326e-05, |
|
"loss": 0.1144, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.6647727272727273, |
|
"grad_norm": 0.33135896921157837, |
|
"learning_rate": 1.52846822530502e-05, |
|
"loss": 0.0345, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.6676136363636364, |
|
"grad_norm": 0.5481773614883423, |
|
"learning_rate": 1.5246197909961659e-05, |
|
"loss": 0.1416, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.6704545454545454, |
|
"grad_norm": 0.3348052203655243, |
|
"learning_rate": 1.5207606062846092e-05, |
|
"loss": 0.0578, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.6732954545454546, |
|
"grad_norm": 0.5042082667350769, |
|
"learning_rate": 1.5168907502519823e-05, |
|
"loss": 0.059, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.6761363636363636, |
|
"grad_norm": 0.3570152819156647, |
|
"learning_rate": 1.5130103021985929e-05, |
|
"loss": 0.106, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.6789772727272727, |
|
"grad_norm": 0.5292866826057434, |
|
"learning_rate": 1.5091193416417981e-05, |
|
"loss": 0.1311, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.6818181818181818, |
|
"grad_norm": 0.44494545459747314, |
|
"learning_rate": 1.5052179483143752e-05, |
|
"loss": 0.0788, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.6846590909090909, |
|
"grad_norm": 0.3993355929851532, |
|
"learning_rate": 1.501306202162887e-05, |
|
"loss": 0.044, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.6875, |
|
"grad_norm": 0.4046841561794281, |
|
"learning_rate": 1.4973841833460457e-05, |
|
"loss": 0.066, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6903409090909091, |
|
"grad_norm": 0.247298926115036, |
|
"learning_rate": 1.493451972233067e-05, |
|
"loss": 0.0302, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.6931818181818182, |
|
"grad_norm": 0.4500928521156311, |
|
"learning_rate": 1.4895096494020274e-05, |
|
"loss": 0.0892, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.6960227272727273, |
|
"grad_norm": 0.373241126537323, |
|
"learning_rate": 1.4855572956382082e-05, |
|
"loss": 0.0691, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.6988636363636364, |
|
"grad_norm": 0.5434209704399109, |
|
"learning_rate": 1.4815949919324443e-05, |
|
"loss": 0.141, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7017045454545454, |
|
"grad_norm": 0.43519458174705505, |
|
"learning_rate": 1.4776228194794624e-05, |
|
"loss": 0.0753, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7045454545454546, |
|
"grad_norm": 0.23297618329524994, |
|
"learning_rate": 1.473640859676217e-05, |
|
"loss": 0.028, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7073863636363636, |
|
"grad_norm": 0.6036296486854553, |
|
"learning_rate": 1.469649194120224e-05, |
|
"loss": 0.0811, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.7102272727272727, |
|
"grad_norm": 0.4559984505176544, |
|
"learning_rate": 1.4656479046078862e-05, |
|
"loss": 0.0841, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7130681818181818, |
|
"grad_norm": 0.5498476624488831, |
|
"learning_rate": 1.46163707313282e-05, |
|
"loss": 0.1086, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.7159090909090909, |
|
"grad_norm": 0.4110633134841919, |
|
"learning_rate": 1.4576167818841732e-05, |
|
"loss": 0.076, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.71875, |
|
"grad_norm": 0.35567420721054077, |
|
"learning_rate": 1.453587113244941e-05, |
|
"loss": 0.0528, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7215909090909091, |
|
"grad_norm": 0.6413715481758118, |
|
"learning_rate": 1.4495481497902789e-05, |
|
"loss": 0.1407, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7244318181818182, |
|
"grad_norm": 0.496463805437088, |
|
"learning_rate": 1.4454999742858093e-05, |
|
"loss": 0.1195, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.28542259335517883, |
|
"learning_rate": 1.441442669685926e-05, |
|
"loss": 0.0543, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7301136363636364, |
|
"grad_norm": 0.32590752840042114, |
|
"learning_rate": 1.4373763191320955e-05, |
|
"loss": 0.0458, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7329545454545454, |
|
"grad_norm": 0.46513354778289795, |
|
"learning_rate": 1.4333010059511505e-05, |
|
"loss": 0.0999, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7357954545454546, |
|
"grad_norm": 0.5399053692817688, |
|
"learning_rate": 1.4292168136535856e-05, |
|
"loss": 0.1079, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7386363636363636, |
|
"grad_norm": 0.27590784430503845, |
|
"learning_rate": 1.425123825931843e-05, |
|
"loss": 0.0394, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7414772727272727, |
|
"grad_norm": 0.47675570845603943, |
|
"learning_rate": 1.4210221266585997e-05, |
|
"loss": 0.1427, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7443181818181818, |
|
"grad_norm": 0.26074454188346863, |
|
"learning_rate": 1.416911799885049e-05, |
|
"loss": 0.0376, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.7471590909090909, |
|
"grad_norm": 0.3271232843399048, |
|
"learning_rate": 1.4127929298391749e-05, |
|
"loss": 0.0375, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5497257113456726, |
|
"learning_rate": 1.4086656009240308e-05, |
|
"loss": 0.0691, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"eval_loss": 0.06671788543462753, |
|
"eval_runtime": 324.3338, |
|
"eval_samples_per_second": 8.309, |
|
"eval_steps_per_second": 1.039, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7528409090909091, |
|
"grad_norm": 0.6283138990402222, |
|
"learning_rate": 1.4045298977160058e-05, |
|
"loss": 0.1483, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7556818181818182, |
|
"grad_norm": 0.41013386845588684, |
|
"learning_rate": 1.4003859049630943e-05, |
|
"loss": 0.0693, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7585227272727273, |
|
"grad_norm": 0.48505860567092896, |
|
"learning_rate": 1.3962337075831583e-05, |
|
"loss": 0.1313, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7613636363636364, |
|
"grad_norm": 0.6941801905632019, |
|
"learning_rate": 1.3920733906621861e-05, |
|
"loss": 0.1285, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7642045454545454, |
|
"grad_norm": 0.32076820731163025, |
|
"learning_rate": 1.3879050394525525e-05, |
|
"loss": 0.0492, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.7670454545454546, |
|
"grad_norm": 0.6292155981063843, |
|
"learning_rate": 1.3837287393712666e-05, |
|
"loss": 0.1172, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.7698863636363636, |
|
"grad_norm": 0.2482394427061081, |
|
"learning_rate": 1.3795445759982262e-05, |
|
"loss": 0.0429, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.7727272727272727, |
|
"grad_norm": 0.449253648519516, |
|
"learning_rate": 1.3753526350744611e-05, |
|
"loss": 0.1198, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.7755681818181818, |
|
"grad_norm": 0.25705787539482117, |
|
"learning_rate": 1.3711530025003768e-05, |
|
"loss": 0.037, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.7784090909090909, |
|
"grad_norm": 0.23594215512275696, |
|
"learning_rate": 1.3669457643339956e-05, |
|
"loss": 0.032, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.78125, |
|
"grad_norm": 0.2733691930770874, |
|
"learning_rate": 1.3627310067891913e-05, |
|
"loss": 0.0397, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.7840909090909091, |
|
"grad_norm": 0.4076026678085327, |
|
"learning_rate": 1.3585088162339233e-05, |
|
"loss": 0.0651, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.7869318181818182, |
|
"grad_norm": 0.2765565812587738, |
|
"learning_rate": 1.3542792791884675e-05, |
|
"loss": 0.0237, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.7897727272727273, |
|
"grad_norm": 0.43744075298309326, |
|
"learning_rate": 1.3500424823236413e-05, |
|
"loss": 0.0622, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.7926136363636364, |
|
"grad_norm": 0.40274515748023987, |
|
"learning_rate": 1.3457985124590298e-05, |
|
"loss": 0.0662, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.7954545454545454, |
|
"grad_norm": 0.22685614228248596, |
|
"learning_rate": 1.3415474565612059e-05, |
|
"loss": 0.0365, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.7982954545454546, |
|
"grad_norm": 0.3656117618083954, |
|
"learning_rate": 1.337289401741947e-05, |
|
"loss": 0.0811, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.8011363636363636, |
|
"grad_norm": 0.4993748664855957, |
|
"learning_rate": 1.3330244352564527e-05, |
|
"loss": 0.1749, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8039772727272727, |
|
"grad_norm": 0.4087863266468048, |
|
"learning_rate": 1.328752644501553e-05, |
|
"loss": 0.0426, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8068181818181818, |
|
"grad_norm": 0.3817732334136963, |
|
"learning_rate": 1.324474117013921e-05, |
|
"loss": 0.065, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8096590909090909, |
|
"grad_norm": 0.625643253326416, |
|
"learning_rate": 1.320188940468277e-05, |
|
"loss": 0.2149, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8125, |
|
"grad_norm": 0.4166359305381775, |
|
"learning_rate": 1.3158972026755927e-05, |
|
"loss": 0.0512, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8153409090909091, |
|
"grad_norm": 0.4035757780075073, |
|
"learning_rate": 1.311598991581291e-05, |
|
"loss": 0.0812, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.4624742865562439, |
|
"learning_rate": 1.3072943952634446e-05, |
|
"loss": 0.0593, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8210227272727273, |
|
"grad_norm": 0.2985156178474426, |
|
"learning_rate": 1.3029835019309713e-05, |
|
"loss": 0.0708, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8238636363636364, |
|
"grad_norm": 0.29609620571136475, |
|
"learning_rate": 1.2986663999218264e-05, |
|
"loss": 0.0337, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8267045454545454, |
|
"grad_norm": 0.4906909465789795, |
|
"learning_rate": 1.2943431777011903e-05, |
|
"loss": 0.1062, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8295454545454546, |
|
"grad_norm": 0.31944286823272705, |
|
"learning_rate": 1.2900139238596598e-05, |
|
"loss": 0.064, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8323863636363636, |
|
"grad_norm": 0.25835123658180237, |
|
"learning_rate": 1.2856787271114292e-05, |
|
"loss": 0.0362, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8352272727272727, |
|
"grad_norm": 0.29158154129981995, |
|
"learning_rate": 1.2813376762924735e-05, |
|
"loss": 0.0254, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8380681818181818, |
|
"grad_norm": 0.47217270731925964, |
|
"learning_rate": 1.2769908603587294e-05, |
|
"loss": 0.0675, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8409090909090909, |
|
"grad_norm": 0.3208584785461426, |
|
"learning_rate": 1.272638368384269e-05, |
|
"loss": 0.059, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.84375, |
|
"grad_norm": 0.27133041620254517, |
|
"learning_rate": 1.268280289559479e-05, |
|
"loss": 0.0247, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8465909090909091, |
|
"grad_norm": 0.4569489359855652, |
|
"learning_rate": 1.2639167131892294e-05, |
|
"loss": 0.0665, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8494318181818182, |
|
"grad_norm": 0.40543097257614136, |
|
"learning_rate": 1.259547728691045e-05, |
|
"loss": 0.0712, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8522727272727273, |
|
"grad_norm": 0.27440789341926575, |
|
"learning_rate": 1.2551734255932728e-05, |
|
"loss": 0.0373, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.8551136363636364, |
|
"grad_norm": 0.3247217535972595, |
|
"learning_rate": 1.2507938935332478e-05, |
|
"loss": 0.0293, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.8579545454545454, |
|
"grad_norm": 0.3218747675418854, |
|
"learning_rate": 1.2464092222554554e-05, |
|
"loss": 0.0455, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.8607954545454546, |
|
"grad_norm": 0.4028306007385254, |
|
"learning_rate": 1.2420195016096934e-05, |
|
"loss": 0.0585, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.8636363636363636, |
|
"grad_norm": 0.2925693988800049, |
|
"learning_rate": 1.2376248215492297e-05, |
|
"loss": 0.0471, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8664772727272727, |
|
"grad_norm": 0.4908764660358429, |
|
"learning_rate": 1.2332252721289594e-05, |
|
"loss": 0.1723, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.8693181818181818, |
|
"grad_norm": 0.3072865605354309, |
|
"learning_rate": 1.2288209435035605e-05, |
|
"loss": 0.035, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.8721590909090909, |
|
"grad_norm": 0.22491200268268585, |
|
"learning_rate": 1.2244119259256442e-05, |
|
"loss": 0.0272, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.875, |
|
"grad_norm": 0.28174304962158203, |
|
"learning_rate": 1.2199983097439079e-05, |
|
"loss": 0.0441, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.8778409090909091, |
|
"grad_norm": 0.34012681245803833, |
|
"learning_rate": 1.2155801854012816e-05, |
|
"loss": 0.0455, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.8806818181818182, |
|
"grad_norm": 0.49420085549354553, |
|
"learning_rate": 1.2111576434330767e-05, |
|
"loss": 0.0743, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.8835227272727273, |
|
"grad_norm": 0.17976929247379303, |
|
"learning_rate": 1.2067307744651288e-05, |
|
"loss": 0.0223, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.8863636363636364, |
|
"grad_norm": 0.49375656247138977, |
|
"learning_rate": 1.2022996692119424e-05, |
|
"loss": 0.1561, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.8892045454545454, |
|
"grad_norm": 0.36805179715156555, |
|
"learning_rate": 1.19786441847483e-05, |
|
"loss": 0.0672, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.8920454545454546, |
|
"grad_norm": 0.4191720485687256, |
|
"learning_rate": 1.1934251131400532e-05, |
|
"loss": 0.0792, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.8948863636363636, |
|
"grad_norm": 0.32242196798324585, |
|
"learning_rate": 1.18898184417696e-05, |
|
"loss": 0.0444, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.8977272727272727, |
|
"grad_norm": 0.32510390877723694, |
|
"learning_rate": 1.1845347026361192e-05, |
|
"loss": 0.0397, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9005681818181818, |
|
"grad_norm": 0.2732970118522644, |
|
"learning_rate": 1.1800837796474562e-05, |
|
"loss": 0.0514, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9034090909090909, |
|
"grad_norm": 0.3913041055202484, |
|
"learning_rate": 1.1756291664183858e-05, |
|
"loss": 0.068, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.90625, |
|
"grad_norm": 0.2852975130081177, |
|
"learning_rate": 1.1711709542319412e-05, |
|
"loss": 0.0248, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.7706276178359985, |
|
"learning_rate": 1.1667092344449053e-05, |
|
"loss": 0.2667, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9119318181818182, |
|
"grad_norm": 0.230291947722435, |
|
"learning_rate": 1.1622440984859384e-05, |
|
"loss": 0.0334, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9147727272727273, |
|
"grad_norm": 0.35278844833374023, |
|
"learning_rate": 1.1577756378537033e-05, |
|
"loss": 0.102, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9176136363636364, |
|
"grad_norm": 0.2541504502296448, |
|
"learning_rate": 1.1533039441149927e-05, |
|
"loss": 0.0601, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9204545454545454, |
|
"grad_norm": 0.3052884340286255, |
|
"learning_rate": 1.1488291089028501e-05, |
|
"loss": 0.0514, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.9232954545454546, |
|
"grad_norm": 0.20483562350273132, |
|
"learning_rate": 1.1443512239146941e-05, |
|
"loss": 0.0374, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.9261363636363636, |
|
"grad_norm": 0.36739325523376465, |
|
"learning_rate": 1.1398703809104392e-05, |
|
"loss": 0.0898, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9289772727272727, |
|
"grad_norm": 0.7544044256210327, |
|
"learning_rate": 1.1353866717106137e-05, |
|
"loss": 0.3117, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.9318181818181818, |
|
"grad_norm": 0.23758046329021454, |
|
"learning_rate": 1.130900188194481e-05, |
|
"loss": 0.0341, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9346590909090909, |
|
"grad_norm": 0.4377850294113159, |
|
"learning_rate": 1.1264110222981535e-05, |
|
"loss": 0.0541, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 0.15626086294651031, |
|
"learning_rate": 1.1219192660127117e-05, |
|
"loss": 0.0093, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.9403409090909091, |
|
"grad_norm": 0.23238646984100342, |
|
"learning_rate": 1.1174250113823173e-05, |
|
"loss": 0.0342, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9431818181818182, |
|
"grad_norm": 0.3313692808151245, |
|
"learning_rate": 1.1129283505023273e-05, |
|
"loss": 0.0618, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9460227272727273, |
|
"grad_norm": 0.45116209983825684, |
|
"learning_rate": 1.1084293755174083e-05, |
|
"loss": 0.0915, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.9488636363636364, |
|
"grad_norm": 0.29833459854125977, |
|
"learning_rate": 1.1039281786196453e-05, |
|
"loss": 0.0479, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.9517045454545454, |
|
"grad_norm": 0.26834267377853394, |
|
"learning_rate": 1.0994248520466555e-05, |
|
"loss": 0.0414, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.9545454545454546, |
|
"grad_norm": 0.2973485291004181, |
|
"learning_rate": 1.0949194880796967e-05, |
|
"loss": 0.0458, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.9573863636363636, |
|
"grad_norm": 0.5005751252174377, |
|
"learning_rate": 1.0904121790417767e-05, |
|
"loss": 0.1774, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.9602272727272727, |
|
"grad_norm": 0.3884448707103729, |
|
"learning_rate": 1.085903017295761e-05, |
|
"loss": 0.0948, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.9630681818181818, |
|
"grad_norm": 0.4331214725971222, |
|
"learning_rate": 1.0813920952424806e-05, |
|
"loss": 0.0667, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.9659090909090909, |
|
"grad_norm": 0.35598206520080566, |
|
"learning_rate": 1.0768795053188379e-05, |
|
"loss": 0.0458, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.96875, |
|
"grad_norm": 0.29866865277290344, |
|
"learning_rate": 1.0723653399959142e-05, |
|
"loss": 0.0408, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.9715909090909091, |
|
"grad_norm": 0.48330047726631165, |
|
"learning_rate": 1.067849691777072e-05, |
|
"loss": 0.1626, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.9744318181818182, |
|
"grad_norm": 0.4522784352302551, |
|
"learning_rate": 1.063332653196062e-05, |
|
"loss": 0.06, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.9772727272727273, |
|
"grad_norm": 0.25423404574394226, |
|
"learning_rate": 1.0588143168151257e-05, |
|
"loss": 0.0335, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.9801136363636364, |
|
"grad_norm": 0.407180517911911, |
|
"learning_rate": 1.0542947752230987e-05, |
|
"loss": 0.0609, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.9829545454545454, |
|
"grad_norm": 0.5892329812049866, |
|
"learning_rate": 1.049774121033514e-05, |
|
"loss": 0.149, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.9857954545454546, |
|
"grad_norm": 0.24134470522403717, |
|
"learning_rate": 1.0452524468827028e-05, |
|
"loss": 0.0419, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.9886363636363636, |
|
"grad_norm": 0.4492047429084778, |
|
"learning_rate": 1.0407298454278983e-05, |
|
"loss": 0.0709, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.9914772727272727, |
|
"grad_norm": 0.2484259009361267, |
|
"learning_rate": 1.0362064093453348e-05, |
|
"loss": 0.0315, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.9943181818181818, |
|
"grad_norm": 0.26170334219932556, |
|
"learning_rate": 1.0316822313283504e-05, |
|
"loss": 0.0428, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.9971590909090909, |
|
"grad_norm": 0.3009549677371979, |
|
"learning_rate": 1.0271574040854863e-05, |
|
"loss": 0.054, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.25771793723106384, |
|
"learning_rate": 1.0226320203385877e-05, |
|
"loss": 0.0322, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.055809713900089264, |
|
"eval_runtime": 322.0734, |
|
"eval_samples_per_second": 8.368, |
|
"eval_steps_per_second": 1.046, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0028409090909092, |
|
"grad_norm": 0.3883383572101593, |
|
"learning_rate": 1.0181061728209034e-05, |
|
"loss": 0.0703, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0056818181818181, |
|
"grad_norm": 0.35572555661201477, |
|
"learning_rate": 1.0135799542751861e-05, |
|
"loss": 0.0373, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0085227272727273, |
|
"grad_norm": 0.29604148864746094, |
|
"learning_rate": 1.0090534574517907e-05, |
|
"loss": 0.0499, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0113636363636365, |
|
"grad_norm": 0.3191431760787964, |
|
"learning_rate": 1.0045267751067758e-05, |
|
"loss": 0.047, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.0142045454545454, |
|
"grad_norm": 0.2044990211725235, |
|
"learning_rate": 1e-05, |
|
"loss": 0.0254, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.0028409090909092, |
|
"grad_norm": 0.22580191493034363, |
|
"learning_rate": 9.954732248932243e-06, |
|
"loss": 0.0359, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0056818181818181, |
|
"grad_norm": 0.329467236995697, |
|
"learning_rate": 9.909465425482093e-06, |
|
"loss": 0.0541, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0085227272727273, |
|
"grad_norm": 0.6182022094726562, |
|
"learning_rate": 9.864200457248144e-06, |
|
"loss": 0.0741, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.0113636363636365, |
|
"grad_norm": 0.3866288363933563, |
|
"learning_rate": 9.81893827179097e-06, |
|
"loss": 0.0502, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0142045454545454, |
|
"grad_norm": 0.3960239291191101, |
|
"learning_rate": 9.773679796614124e-06, |
|
"loss": 0.0317, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0170454545454546, |
|
"grad_norm": 0.27909645438194275, |
|
"learning_rate": 9.728425959145139e-06, |
|
"loss": 0.0365, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0198863636363635, |
|
"grad_norm": 0.4127347469329834, |
|
"learning_rate": 9.683177686716501e-06, |
|
"loss": 0.0492, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.0227272727272727, |
|
"grad_norm": 0.44473913311958313, |
|
"learning_rate": 9.637935906546655e-06, |
|
"loss": 0.0533, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.0255681818181819, |
|
"grad_norm": 0.3825552463531494, |
|
"learning_rate": 9.592701545721022e-06, |
|
"loss": 0.036, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0284090909090908, |
|
"grad_norm": 0.2615335285663605, |
|
"learning_rate": 9.547475531172973e-06, |
|
"loss": 0.0386, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.03125, |
|
"grad_norm": 0.44840914011001587, |
|
"learning_rate": 9.502258789664865e-06, |
|
"loss": 0.0967, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.0340909090909092, |
|
"grad_norm": 0.5567358136177063, |
|
"learning_rate": 9.457052247769018e-06, |
|
"loss": 0.0486, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.0369318181818181, |
|
"grad_norm": 0.33771201968193054, |
|
"learning_rate": 9.411856831848745e-06, |
|
"loss": 0.0439, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.0397727272727273, |
|
"grad_norm": 0.24962462484836578, |
|
"learning_rate": 9.366673468039384e-06, |
|
"loss": 0.0279, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.0426136363636365, |
|
"grad_norm": 0.44227010011672974, |
|
"learning_rate": 9.321503082229281e-06, |
|
"loss": 0.0597, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.0454545454545454, |
|
"grad_norm": 0.33881664276123047, |
|
"learning_rate": 9.276346600040863e-06, |
|
"loss": 0.037, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.0482954545454546, |
|
"grad_norm": 0.2290397733449936, |
|
"learning_rate": 9.231204946811624e-06, |
|
"loss": 0.0276, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.0511363636363635, |
|
"grad_norm": 0.3605166971683502, |
|
"learning_rate": 9.186079047575197e-06, |
|
"loss": 0.0375, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.0539772727272727, |
|
"grad_norm": 0.5530329942703247, |
|
"learning_rate": 9.140969827042392e-06, |
|
"loss": 0.0787, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.0568181818181819, |
|
"grad_norm": 0.14625893533229828, |
|
"learning_rate": 9.095878209582237e-06, |
|
"loss": 0.014, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.0596590909090908, |
|
"grad_norm": 0.12633948028087616, |
|
"learning_rate": 9.050805119203035e-06, |
|
"loss": 0.0149, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.0625, |
|
"grad_norm": 0.506729781627655, |
|
"learning_rate": 9.00575147953345e-06, |
|
"loss": 0.0723, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.0653409090909092, |
|
"grad_norm": 0.3099755644798279, |
|
"learning_rate": 8.96071821380355e-06, |
|
"loss": 0.0293, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.0681818181818181, |
|
"grad_norm": 0.19945262372493744, |
|
"learning_rate": 8.91570624482592e-06, |
|
"loss": 0.0231, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.0710227272727273, |
|
"grad_norm": 0.3164744973182678, |
|
"learning_rate": 8.87071649497673e-06, |
|
"loss": 0.0444, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.0738636363636365, |
|
"grad_norm": 0.3066563308238983, |
|
"learning_rate": 8.825749886176832e-06, |
|
"loss": 0.0383, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.0767045454545454, |
|
"grad_norm": 0.16921481490135193, |
|
"learning_rate": 8.780807339872886e-06, |
|
"loss": 0.0194, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.0795454545454546, |
|
"grad_norm": 0.440748929977417, |
|
"learning_rate": 8.735889777018466e-06, |
|
"loss": 0.0487, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.0823863636363635, |
|
"grad_norm": 0.2782658040523529, |
|
"learning_rate": 8.690998118055193e-06, |
|
"loss": 0.0333, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.0852272727272727, |
|
"grad_norm": 0.22277198731899261, |
|
"learning_rate": 8.646133282893865e-06, |
|
"loss": 0.0217, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.0880681818181819, |
|
"grad_norm": 0.21947693824768066, |
|
"learning_rate": 8.601296190895611e-06, |
|
"loss": 0.0263, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 0.1716959923505783, |
|
"learning_rate": 8.55648776085306e-06, |
|
"loss": 0.017, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.09375, |
|
"grad_norm": 0.3042616546154022, |
|
"learning_rate": 8.511708910971506e-06, |
|
"loss": 0.0392, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.0965909090909092, |
|
"grad_norm": 0.38685595989227295, |
|
"learning_rate": 8.466960558850078e-06, |
|
"loss": 0.0438, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.0994318181818181, |
|
"grad_norm": 0.5748195052146912, |
|
"learning_rate": 8.42224362146297e-06, |
|
"loss": 0.0592, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1022727272727273, |
|
"grad_norm": 0.4325203597545624, |
|
"learning_rate": 8.37755901514062e-06, |
|
"loss": 0.0682, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1051136363636365, |
|
"grad_norm": 0.21434345841407776, |
|
"learning_rate": 8.332907655550948e-06, |
|
"loss": 0.0239, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1079545454545454, |
|
"grad_norm": 0.243415966629982, |
|
"learning_rate": 8.288290457680593e-06, |
|
"loss": 0.0207, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.1107954545454546, |
|
"grad_norm": 0.4860462546348572, |
|
"learning_rate": 8.243708335816145e-06, |
|
"loss": 0.0563, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.1136363636363635, |
|
"grad_norm": 0.37320631742477417, |
|
"learning_rate": 8.19916220352544e-06, |
|
"loss": 0.0485, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.1164772727272727, |
|
"grad_norm": 0.6365073323249817, |
|
"learning_rate": 8.15465297363881e-06, |
|
"loss": 0.1069, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.1193181818181819, |
|
"grad_norm": 0.2253241240978241, |
|
"learning_rate": 8.110181558230405e-06, |
|
"loss": 0.0292, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1221590909090908, |
|
"grad_norm": 0.32560545206069946, |
|
"learning_rate": 8.065748868599471e-06, |
|
"loss": 0.0333, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.125, |
|
"grad_norm": 0.1719602346420288, |
|
"learning_rate": 8.021355815251703e-06, |
|
"loss": 0.0148, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.1278409090909092, |
|
"grad_norm": 0.27399298548698425, |
|
"learning_rate": 7.977003307880579e-06, |
|
"loss": 0.039, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.1306818181818181, |
|
"grad_norm": 0.12785029411315918, |
|
"learning_rate": 7.932692255348711e-06, |
|
"loss": 0.0123, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.1335227272727273, |
|
"grad_norm": 0.5757378935813904, |
|
"learning_rate": 7.888423565669236e-06, |
|
"loss": 0.0443, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.1363636363636362, |
|
"grad_norm": 0.1946515142917633, |
|
"learning_rate": 7.844198145987187e-06, |
|
"loss": 0.0159, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.1392045454545454, |
|
"grad_norm": 0.1719973236322403, |
|
"learning_rate": 7.800016902560924e-06, |
|
"loss": 0.0244, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.1420454545454546, |
|
"grad_norm": 0.6020510196685791, |
|
"learning_rate": 7.75588074074356e-06, |
|
"loss": 0.1468, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.1448863636363638, |
|
"grad_norm": 0.1654803454875946, |
|
"learning_rate": 7.7117905649644e-06, |
|
"loss": 0.0137, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.1477272727272727, |
|
"grad_norm": 0.20969092845916748, |
|
"learning_rate": 7.667747278710407e-06, |
|
"loss": 0.0209, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.1505681818181819, |
|
"grad_norm": 0.09986022859811783, |
|
"learning_rate": 7.623751784507707e-06, |
|
"loss": 0.0082, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.1534090909090908, |
|
"grad_norm": 0.4177515506744385, |
|
"learning_rate": 7.579804983903067e-06, |
|
"loss": 0.0481, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.15625, |
|
"grad_norm": 0.21432848274707794, |
|
"learning_rate": 7.535907777445449e-06, |
|
"loss": 0.0222, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.1590909090909092, |
|
"grad_norm": 0.15598714351654053, |
|
"learning_rate": 7.492061064667526e-06, |
|
"loss": 0.0212, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.1619318181818181, |
|
"grad_norm": 0.17685821652412415, |
|
"learning_rate": 7.448265744067275e-06, |
|
"loss": 0.0108, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.1647727272727273, |
|
"grad_norm": 0.1826607584953308, |
|
"learning_rate": 7.404522713089555e-06, |
|
"loss": 0.014, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.1676136363636362, |
|
"grad_norm": 0.4454158842563629, |
|
"learning_rate": 7.360832868107708e-06, |
|
"loss": 0.0268, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.1704545454545454, |
|
"grad_norm": 0.13606494665145874, |
|
"learning_rate": 7.3171971044052135e-06, |
|
"loss": 0.014, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.1732954545454546, |
|
"grad_norm": 0.20646145939826965, |
|
"learning_rate": 7.273616316157312e-06, |
|
"loss": 0.0205, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.1761363636363638, |
|
"grad_norm": 0.2317519336938858, |
|
"learning_rate": 7.230091396412711e-06, |
|
"loss": 0.0292, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.1789772727272727, |
|
"grad_norm": 0.40039902925491333, |
|
"learning_rate": 7.186623237075265e-06, |
|
"loss": 0.0699, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 0.377642959356308, |
|
"learning_rate": 7.143212728885714e-06, |
|
"loss": 0.0293, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.1846590909090908, |
|
"grad_norm": 0.29831114411354065, |
|
"learning_rate": 7.0998607614034035e-06, |
|
"loss": 0.0462, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.1875, |
|
"grad_norm": 0.23558694124221802, |
|
"learning_rate": 7.056568222988098e-06, |
|
"loss": 0.0199, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.1903409090909092, |
|
"grad_norm": 0.5152977108955383, |
|
"learning_rate": 7.013336000781739e-06, |
|
"loss": 0.0689, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.1931818181818181, |
|
"grad_norm": 0.3257889747619629, |
|
"learning_rate": 6.970164980690286e-06, |
|
"loss": 0.0272, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.1960227272727273, |
|
"grad_norm": 0.2875789999961853, |
|
"learning_rate": 6.927056047365558e-06, |
|
"loss": 0.0322, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.1988636363636362, |
|
"grad_norm": 0.3039776086807251, |
|
"learning_rate": 6.884010084187094e-06, |
|
"loss": 0.0305, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2017045454545454, |
|
"grad_norm": 0.16907234489917755, |
|
"learning_rate": 6.841027973244077e-06, |
|
"loss": 0.0104, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.2045454545454546, |
|
"grad_norm": 0.23682954907417297, |
|
"learning_rate": 6.798110595317229e-06, |
|
"loss": 0.0307, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.2073863636363638, |
|
"grad_norm": 0.6338050961494446, |
|
"learning_rate": 6.755258829860791e-06, |
|
"loss": 0.1344, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.2102272727272727, |
|
"grad_norm": 0.22749580442905426, |
|
"learning_rate": 6.712473554984472e-06, |
|
"loss": 0.0176, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.2130681818181819, |
|
"grad_norm": 0.18605318665504456, |
|
"learning_rate": 6.669755647435474e-06, |
|
"loss": 0.0202, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.2159090909090908, |
|
"grad_norm": 0.36711132526397705, |
|
"learning_rate": 6.627105982580529e-06, |
|
"loss": 0.0524, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.21875, |
|
"grad_norm": 0.4177369177341461, |
|
"learning_rate": 6.584525434387945e-06, |
|
"loss": 0.0464, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.2215909090909092, |
|
"grad_norm": 0.285185307264328, |
|
"learning_rate": 6.542014875409703e-06, |
|
"loss": 0.0278, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.2244318181818181, |
|
"grad_norm": 0.4436541795730591, |
|
"learning_rate": 6.499575176763591e-06, |
|
"loss": 0.0414, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.2272727272727273, |
|
"grad_norm": 0.33370596170425415, |
|
"learning_rate": 6.457207208115328e-06, |
|
"loss": 0.0299, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.2301136363636362, |
|
"grad_norm": 0.32635894417762756, |
|
"learning_rate": 6.414911837660768e-06, |
|
"loss": 0.0343, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.2329545454545454, |
|
"grad_norm": 0.2855762243270874, |
|
"learning_rate": 6.372689932108092e-06, |
|
"loss": 0.0526, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.2357954545454546, |
|
"grad_norm": 0.194916769862175, |
|
"learning_rate": 6.330542356660047e-06, |
|
"loss": 0.0186, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.2357954545454546, |
|
"eval_loss": 0.052834562957286835, |
|
"eval_runtime": 321.2924, |
|
"eval_samples_per_second": 8.388, |
|
"eval_steps_per_second": 1.049, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.2386363636363638, |
|
"grad_norm": 0.2691531777381897, |
|
"learning_rate": 6.288469974996234e-06, |
|
"loss": 0.0363, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.2414772727272727, |
|
"grad_norm": 0.20350024104118347, |
|
"learning_rate": 6.246473649255391e-06, |
|
"loss": 0.0318, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.2443181818181819, |
|
"grad_norm": 0.36513552069664, |
|
"learning_rate": 6.204554240017742e-06, |
|
"loss": 0.035, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.2471590909090908, |
|
"grad_norm": 0.21137046813964844, |
|
"learning_rate": 6.162712606287335e-06, |
|
"loss": 0.0224, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.29429683089256287, |
|
"learning_rate": 6.120949605474479e-06, |
|
"loss": 0.0308, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.2528409090909092, |
|
"grad_norm": 0.26205527782440186, |
|
"learning_rate": 6.079266093378138e-06, |
|
"loss": 0.0453, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.2556818181818181, |
|
"grad_norm": 0.26017487049102783, |
|
"learning_rate": 6.037662924168419e-06, |
|
"loss": 0.031, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.2585227272727273, |
|
"grad_norm": 0.7420312166213989, |
|
"learning_rate": 5.9961409503690605e-06, |
|
"loss": 0.1057, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.2613636363636362, |
|
"grad_norm": 0.18516167998313904, |
|
"learning_rate": 5.954701022839944e-06, |
|
"loss": 0.0179, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.2642045454545454, |
|
"grad_norm": 0.2416505217552185, |
|
"learning_rate": 5.913343990759695e-06, |
|
"loss": 0.0289, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.2670454545454546, |
|
"grad_norm": 0.3561277389526367, |
|
"learning_rate": 5.872070701608251e-06, |
|
"loss": 0.0559, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.2698863636363638, |
|
"grad_norm": 0.3896782398223877, |
|
"learning_rate": 5.830882001149517e-06, |
|
"loss": 0.0659, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.26022788882255554, |
|
"learning_rate": 5.789778733414004e-06, |
|
"loss": 0.0337, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.2755681818181819, |
|
"grad_norm": 0.17623814940452576, |
|
"learning_rate": 5.748761740681573e-06, |
|
"loss": 0.0171, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.2784090909090908, |
|
"grad_norm": 0.20112119615077972, |
|
"learning_rate": 5.707831863464146e-06, |
|
"loss": 0.0226, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.28125, |
|
"grad_norm": 0.26868581771850586, |
|
"learning_rate": 5.666989940488496e-06, |
|
"loss": 0.0281, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.2840909090909092, |
|
"grad_norm": 0.17673923075199127, |
|
"learning_rate": 5.626236808679051e-06, |
|
"loss": 0.0116, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.2869318181818181, |
|
"grad_norm": 0.42206647992134094, |
|
"learning_rate": 5.585573303140741e-06, |
|
"loss": 0.0568, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.2897727272727273, |
|
"grad_norm": 0.18107818067073822, |
|
"learning_rate": 5.54500025714191e-06, |
|
"loss": 0.0224, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.2926136363636362, |
|
"grad_norm": 0.34270885586738586, |
|
"learning_rate": 5.504518502097212e-06, |
|
"loss": 0.0427, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2954545454545454, |
|
"grad_norm": 0.3076038956642151, |
|
"learning_rate": 5.464128867550593e-06, |
|
"loss": 0.0414, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.2982954545454546, |
|
"grad_norm": 0.23357230424880981, |
|
"learning_rate": 5.423832181158274e-06, |
|
"loss": 0.0226, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.3011363636363638, |
|
"grad_norm": 0.41625964641571045, |
|
"learning_rate": 5.383629268671804e-06, |
|
"loss": 0.0428, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.3039772727272727, |
|
"grad_norm": 0.34053537249565125, |
|
"learning_rate": 5.34352095392114e-06, |
|
"loss": 0.0383, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.3068181818181819, |
|
"grad_norm": 0.313376784324646, |
|
"learning_rate": 5.3035080587977665e-06, |
|
"loss": 0.0429, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.3096590909090908, |
|
"grad_norm": 0.28910282254219055, |
|
"learning_rate": 5.263591403237831e-06, |
|
"loss": 0.0323, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.3125, |
|
"grad_norm": 0.3461354076862335, |
|
"learning_rate": 5.22377180520538e-06, |
|
"loss": 0.0589, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.3153409090909092, |
|
"grad_norm": 0.36325037479400635, |
|
"learning_rate": 5.184050080675558e-06, |
|
"loss": 0.0359, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.3181818181818181, |
|
"grad_norm": 0.20179492235183716, |
|
"learning_rate": 5.144427043617919e-06, |
|
"loss": 0.0259, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.3210227272727273, |
|
"grad_norm": 0.18621599674224854, |
|
"learning_rate": 5.10490350597973e-06, |
|
"loss": 0.0156, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.3238636363636362, |
|
"grad_norm": 0.31812962889671326, |
|
"learning_rate": 5.06548027766933e-06, |
|
"loss": 0.0362, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.3267045454545454, |
|
"grad_norm": 0.19596129655838013, |
|
"learning_rate": 5.026158166539548e-06, |
|
"loss": 0.0239, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.3295454545454546, |
|
"grad_norm": 0.6086770296096802, |
|
"learning_rate": 4.986937978371132e-06, |
|
"loss": 0.0502, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.3323863636363638, |
|
"grad_norm": 0.42738351225852966, |
|
"learning_rate": 4.9478205168562535e-06, |
|
"loss": 0.0484, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.3352272727272727, |
|
"grad_norm": 0.2786358594894409, |
|
"learning_rate": 4.908806583582021e-06, |
|
"loss": 0.0336, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.3380681818181819, |
|
"grad_norm": 0.202577605843544, |
|
"learning_rate": 4.869896978014071e-06, |
|
"loss": 0.0286, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.3409090909090908, |
|
"grad_norm": 0.42583322525024414, |
|
"learning_rate": 4.83109249748018e-06, |
|
"loss": 0.0843, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.34375, |
|
"grad_norm": 0.6603618264198303, |
|
"learning_rate": 4.792393937153914e-06, |
|
"loss": 0.0674, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.3465909090909092, |
|
"grad_norm": 0.43137821555137634, |
|
"learning_rate": 4.753802090038344e-06, |
|
"loss": 0.0516, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.3494318181818181, |
|
"grad_norm": 0.5275350213050842, |
|
"learning_rate": 4.7153177469498045e-06, |
|
"loss": 0.0726, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.3522727272727273, |
|
"grad_norm": 0.35748714208602905, |
|
"learning_rate": 4.676941696501674e-06, |
|
"loss": 0.0453, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.3551136363636362, |
|
"grad_norm": 0.5304725766181946, |
|
"learning_rate": 4.6386747250882226e-06, |
|
"loss": 0.0431, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.3579545454545454, |
|
"grad_norm": 0.2420758306980133, |
|
"learning_rate": 4.600517616868491e-06, |
|
"loss": 0.0284, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.3607954545454546, |
|
"grad_norm": 0.5091615319252014, |
|
"learning_rate": 4.562471153750221e-06, |
|
"loss": 0.1026, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.41949352622032166, |
|
"learning_rate": 4.52453611537385e-06, |
|
"loss": 0.0516, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.3664772727272727, |
|
"grad_norm": 0.35782483220100403, |
|
"learning_rate": 4.486713279096515e-06, |
|
"loss": 0.0505, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.3693181818181819, |
|
"grad_norm": 0.4138379395008087, |
|
"learning_rate": 4.449003419976133e-06, |
|
"loss": 0.0789, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.3721590909090908, |
|
"grad_norm": 0.26523318886756897, |
|
"learning_rate": 4.411407310755513e-06, |
|
"loss": 0.0311, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.375, |
|
"grad_norm": 0.46949058771133423, |
|
"learning_rate": 4.373925721846519e-06, |
|
"loss": 0.0293, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.3778409090909092, |
|
"grad_norm": 0.381064772605896, |
|
"learning_rate": 4.336559421314298e-06, |
|
"loss": 0.0525, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.3806818181818181, |
|
"grad_norm": 0.3074735701084137, |
|
"learning_rate": 4.29930917486153e-06, |
|
"loss": 0.0216, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.3835227272727273, |
|
"grad_norm": 0.21338820457458496, |
|
"learning_rate": 4.262175745812729e-06, |
|
"loss": 0.0239, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.3863636363636362, |
|
"grad_norm": 0.31537333130836487, |
|
"learning_rate": 4.225159895098623e-06, |
|
"loss": 0.0412, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.3892045454545454, |
|
"grad_norm": 0.2787759006023407, |
|
"learning_rate": 4.1882623812405345e-06, |
|
"loss": 0.0275, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.3920454545454546, |
|
"grad_norm": 0.2170637845993042, |
|
"learning_rate": 4.151483960334862e-06, |
|
"loss": 0.0244, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.3948863636363638, |
|
"grad_norm": 0.3851534426212311, |
|
"learning_rate": 4.1148253860375764e-06, |
|
"loss": 0.0426, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.3977272727272727, |
|
"grad_norm": 0.15403486788272858, |
|
"learning_rate": 4.078287409548764e-06, |
|
"loss": 0.0171, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.4005681818181819, |
|
"grad_norm": 0.3017263114452362, |
|
"learning_rate": 4.0418707795972575e-06, |
|
"loss": 0.0274, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.4034090909090908, |
|
"grad_norm": 0.3194883465766907, |
|
"learning_rate": 4.005576242425272e-06, |
|
"loss": 0.0374, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.40625, |
|
"grad_norm": 0.428733766078949, |
|
"learning_rate": 3.969404541773132e-06, |
|
"loss": 0.0768, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.4090909090909092, |
|
"grad_norm": 0.25702938437461853, |
|
"learning_rate": 3.933356418864008e-06, |
|
"loss": 0.0256, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.4119318181818181, |
|
"grad_norm": 0.15047182142734528, |
|
"learning_rate": 3.897432612388752e-06, |
|
"loss": 0.0136, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.4147727272727273, |
|
"grad_norm": 0.14957940578460693, |
|
"learning_rate": 3.861633858490746e-06, |
|
"loss": 0.0188, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.4176136363636362, |
|
"grad_norm": 0.16923752427101135, |
|
"learning_rate": 3.8259608907508105e-06, |
|
"loss": 0.0169, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.4204545454545454, |
|
"grad_norm": 0.23884394764900208, |
|
"learning_rate": 3.7904144401721976e-06, |
|
"loss": 0.0301, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.4232954545454546, |
|
"grad_norm": 0.3131102919578552, |
|
"learning_rate": 3.7549952351655795e-06, |
|
"loss": 0.0334, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.4261363636363638, |
|
"grad_norm": 0.3991200923919678, |
|
"learning_rate": 3.719704001534149e-06, |
|
"loss": 0.0712, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.4289772727272727, |
|
"grad_norm": 0.3849276602268219, |
|
"learning_rate": 3.6845414624587326e-06, |
|
"loss": 0.0476, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.4318181818181819, |
|
"grad_norm": 0.3225853145122528, |
|
"learning_rate": 3.649508338482972e-06, |
|
"loss": 0.0515, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.4346590909090908, |
|
"grad_norm": 0.3104516565799713, |
|
"learning_rate": 3.6146053474985565e-06, |
|
"loss": 0.0436, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.4375, |
|
"grad_norm": 0.3936445415019989, |
|
"learning_rate": 3.579833204730525e-06, |
|
"loss": 0.0487, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.4403409090909092, |
|
"grad_norm": 0.21598631143569946, |
|
"learning_rate": 3.5451926227225997e-06, |
|
"loss": 0.0216, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.4431818181818181, |
|
"grad_norm": 0.26715341210365295, |
|
"learning_rate": 3.5106843113225854e-06, |
|
"loss": 0.0269, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.4460227272727273, |
|
"grad_norm": 0.19402551651000977, |
|
"learning_rate": 3.4763089776678206e-06, |
|
"loss": 0.0212, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.4488636363636362, |
|
"grad_norm": 0.3250589370727539, |
|
"learning_rate": 3.4420673261706906e-06, |
|
"loss": 0.0473, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.4517045454545454, |
|
"grad_norm": 0.2799818813800812, |
|
"learning_rate": 3.4079600585041993e-06, |
|
"loss": 0.0313, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.11718513816595078, |
|
"learning_rate": 3.3739878735875797e-06, |
|
"loss": 0.0127, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.4573863636363638, |
|
"grad_norm": 0.25607946515083313, |
|
"learning_rate": 3.3401514675719815e-06, |
|
"loss": 0.0307, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.4602272727272727, |
|
"grad_norm": 0.2513953149318695, |
|
"learning_rate": 3.3064515338261937e-06, |
|
"loss": 0.0334, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.4630681818181819, |
|
"grad_norm": 0.36698806285858154, |
|
"learning_rate": 3.2728887629224415e-06, |
|
"loss": 0.0558, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.4659090909090908, |
|
"grad_norm": 0.46339672803878784, |
|
"learning_rate": 3.239463842622247e-06, |
|
"loss": 0.0619, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.46875, |
|
"grad_norm": 0.37225818634033203, |
|
"learning_rate": 3.206177457862321e-06, |
|
"loss": 0.0443, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.4715909090909092, |
|
"grad_norm": 0.2377389669418335, |
|
"learning_rate": 3.173030290740524e-06, |
|
"loss": 0.0341, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.4744318181818181, |
|
"grad_norm": 0.40146350860595703, |
|
"learning_rate": 3.1400230205019124e-06, |
|
"loss": 0.0405, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.4772727272727273, |
|
"grad_norm": 0.3381498157978058, |
|
"learning_rate": 3.1071563235247883e-06, |
|
"loss": 0.0463, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.4801136363636362, |
|
"grad_norm": 0.19083504378795624, |
|
"learning_rate": 3.074430873306865e-06, |
|
"loss": 0.0217, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.4829545454545454, |
|
"grad_norm": 0.4946057200431824, |
|
"learning_rate": 3.041847340451456e-06, |
|
"loss": 0.0499, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.4857954545454546, |
|
"grad_norm": 0.269089937210083, |
|
"learning_rate": 3.0094063926537233e-06, |
|
"loss": 0.0312, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.4857954545454546, |
|
"eval_loss": 0.04934047535061836, |
|
"eval_runtime": 321.3119, |
|
"eval_samples_per_second": 8.387, |
|
"eval_steps_per_second": 1.049, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.4886363636363638, |
|
"grad_norm": 0.2874927818775177, |
|
"learning_rate": 2.9771086946870177e-06, |
|
"loss": 0.0212, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.4914772727272727, |
|
"grad_norm": 0.2589636743068695, |
|
"learning_rate": 2.944954908389229e-06, |
|
"loss": 0.0201, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.4943181818181819, |
|
"grad_norm": 0.3492966294288635, |
|
"learning_rate": 2.912945692649255e-06, |
|
"loss": 0.0354, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.4971590909090908, |
|
"grad_norm": 0.4711867570877075, |
|
"learning_rate": 2.881081703393466e-06, |
|
"loss": 0.044, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.2505286931991577, |
|
"learning_rate": 2.849363593572293e-06, |
|
"loss": 0.0715, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.5028409090909092, |
|
"grad_norm": 0.12618179619312286, |
|
"learning_rate": 2.8177920131468273e-06, |
|
"loss": 0.0105, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.5056818181818183, |
|
"grad_norm": 0.22411687672138214, |
|
"learning_rate": 2.7863676090755175e-06, |
|
"loss": 0.0259, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.5085227272727273, |
|
"grad_norm": 0.4143596589565277, |
|
"learning_rate": 2.7550910253008933e-06, |
|
"loss": 0.0633, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.5113636363636362, |
|
"grad_norm": 0.4833144545555115, |
|
"learning_rate": 2.723962902736389e-06, |
|
"loss": 0.0649, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.5142045454545454, |
|
"grad_norm": 0.3849513530731201, |
|
"learning_rate": 2.6929838792532035e-06, |
|
"loss": 0.0527, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.5170454545454546, |
|
"grad_norm": 0.2576873004436493, |
|
"learning_rate": 2.662154589667217e-06, |
|
"loss": 0.0246, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.5198863636363638, |
|
"grad_norm": 0.23061725497245789, |
|
"learning_rate": 2.6314756657260053e-06, |
|
"loss": 0.0168, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.5227272727272727, |
|
"grad_norm": 0.18426130712032318, |
|
"learning_rate": 2.6009477360958713e-06, |
|
"loss": 0.0221, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.5255681818181817, |
|
"grad_norm": 0.21770624816417694, |
|
"learning_rate": 2.570571426348978e-06, |
|
"loss": 0.0115, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.5284090909090908, |
|
"grad_norm": 0.2922838032245636, |
|
"learning_rate": 2.540347358950529e-06, |
|
"loss": 0.0354, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.53125, |
|
"grad_norm": 0.15898585319519043, |
|
"learning_rate": 2.5102761532460005e-06, |
|
"loss": 0.0168, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.5340909090909092, |
|
"grad_norm": 0.3408936560153961, |
|
"learning_rate": 2.480358425448457e-06, |
|
"loss": 0.0383, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.5369318181818183, |
|
"grad_norm": 0.2640663981437683, |
|
"learning_rate": 2.450594788625932e-06, |
|
"loss": 0.0391, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.5397727272727273, |
|
"grad_norm": 0.18618500232696533, |
|
"learning_rate": 2.420985852688854e-06, |
|
"loss": 0.0163, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.5426136363636362, |
|
"grad_norm": 0.195343479514122, |
|
"learning_rate": 2.3915322243775564e-06, |
|
"loss": 0.0602, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 0.22662031650543213, |
|
"learning_rate": 2.362234507249832e-06, |
|
"loss": 0.0262, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.5482954545454546, |
|
"grad_norm": 0.36082425713539124, |
|
"learning_rate": 2.3330933016685753e-06, |
|
"loss": 0.0483, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.5511363636363638, |
|
"grad_norm": 0.2544262111186981, |
|
"learning_rate": 2.3041092047894843e-06, |
|
"loss": 0.0209, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.5539772727272727, |
|
"grad_norm": 0.16143347322940826, |
|
"learning_rate": 2.275282810548811e-06, |
|
"loss": 0.0182, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.5568181818181817, |
|
"grad_norm": 0.32790812849998474, |
|
"learning_rate": 2.2466147096512037e-06, |
|
"loss": 0.0451, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.5596590909090908, |
|
"grad_norm": 0.187930628657341, |
|
"learning_rate": 2.2181054895575847e-06, |
|
"loss": 0.0259, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.5625, |
|
"grad_norm": 0.2864295542240143, |
|
"learning_rate": 2.1897557344731292e-06, |
|
"loss": 0.0285, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.5653409090909092, |
|
"grad_norm": 0.20400214195251465, |
|
"learning_rate": 2.1615660253352888e-06, |
|
"loss": 0.0207, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.5681818181818183, |
|
"grad_norm": 0.21328507363796234, |
|
"learning_rate": 2.133536939801888e-06, |
|
"loss": 0.0241, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.5710227272727273, |
|
"grad_norm": 0.250006765127182, |
|
"learning_rate": 2.105669052239274e-06, |
|
"loss": 0.0264, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.5738636363636362, |
|
"grad_norm": 0.28018662333488464, |
|
"learning_rate": 2.077962933710572e-06, |
|
"loss": 0.0376, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.5767045454545454, |
|
"grad_norm": 0.13916772603988647, |
|
"learning_rate": 2.050419151963957e-06, |
|
"loss": 0.0119, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.5795454545454546, |
|
"grad_norm": 0.16353723406791687, |
|
"learning_rate": 2.0230382714210384e-06, |
|
"loss": 0.0175, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.5823863636363638, |
|
"grad_norm": 0.3708636164665222, |
|
"learning_rate": 1.9958208531652876e-06, |
|
"loss": 0.0556, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.5852272727272727, |
|
"grad_norm": 0.29886114597320557, |
|
"learning_rate": 1.9687674549305335e-06, |
|
"loss": 0.0489, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.5880681818181817, |
|
"grad_norm": 0.15193718671798706, |
|
"learning_rate": 1.9418786310895467e-06, |
|
"loss": 0.0141, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.5909090909090908, |
|
"grad_norm": 0.40020623803138733, |
|
"learning_rate": 1.9151549326426654e-06, |
|
"loss": 0.0554, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.59375, |
|
"grad_norm": 0.1339547485113144, |
|
"learning_rate": 1.8885969072065224e-06, |
|
"loss": 0.0122, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.5965909090909092, |
|
"grad_norm": 0.14078541100025177, |
|
"learning_rate": 1.8622050990027996e-06, |
|
"loss": 0.0095, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.5994318181818183, |
|
"grad_norm": 0.20210637152194977, |
|
"learning_rate": 1.835980048847098e-06, |
|
"loss": 0.0212, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.6022727272727273, |
|
"grad_norm": 0.2009180337190628, |
|
"learning_rate": 1.809922294137847e-06, |
|
"loss": 0.0211, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.6051136363636362, |
|
"grad_norm": 0.2940455973148346, |
|
"learning_rate": 1.7840323688452833e-06, |
|
"loss": 0.0357, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.6079545454545454, |
|
"grad_norm": 0.20636039972305298, |
|
"learning_rate": 1.7583108035005269e-06, |
|
"loss": 0.0217, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.6107954545454546, |
|
"grad_norm": 0.6501495838165283, |
|
"learning_rate": 1.7327581251846902e-06, |
|
"loss": 0.0743, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.6136363636363638, |
|
"grad_norm": 0.27706483006477356, |
|
"learning_rate": 1.7073748575180937e-06, |
|
"loss": 0.0234, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.6164772727272727, |
|
"grad_norm": 0.19797846674919128, |
|
"learning_rate": 1.6821615206495313e-06, |
|
"loss": 0.0181, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.6193181818181817, |
|
"grad_norm": 0.2928190529346466, |
|
"learning_rate": 1.6571186312456011e-06, |
|
"loss": 0.0491, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.6221590909090908, |
|
"grad_norm": 0.26671117544174194, |
|
"learning_rate": 1.6322467024801282e-06, |
|
"loss": 0.0264, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.625, |
|
"grad_norm": 0.20558620989322662, |
|
"learning_rate": 1.6075462440236512e-06, |
|
"loss": 0.0178, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.6278409090909092, |
|
"grad_norm": 0.1802850067615509, |
|
"learning_rate": 1.5830177620329712e-06, |
|
"loss": 0.0212, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.6306818181818183, |
|
"grad_norm": 0.24599629640579224, |
|
"learning_rate": 1.558661759140786e-06, |
|
"loss": 0.0298, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.6335227272727273, |
|
"grad_norm": 0.1912572979927063, |
|
"learning_rate": 1.5344787344453803e-06, |
|
"loss": 0.0221, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.2533075213432312, |
|
"learning_rate": 1.510469183500405e-06, |
|
"loss": 0.0372, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.6392045454545454, |
|
"grad_norm": 0.3094019293785095, |
|
"learning_rate": 1.4866335983047264e-06, |
|
"loss": 0.034, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.6420454545454546, |
|
"grad_norm": 0.30636775493621826, |
|
"learning_rate": 1.4629724672923384e-06, |
|
"loss": 0.0262, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.6448863636363638, |
|
"grad_norm": 0.3447078466415405, |
|
"learning_rate": 1.439486275322357e-06, |
|
"loss": 0.0446, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.6477272727272727, |
|
"grad_norm": 0.19196145236492157, |
|
"learning_rate": 1.4161755036690771e-06, |
|
"loss": 0.0185, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.6505681818181817, |
|
"grad_norm": 0.2407604306936264, |
|
"learning_rate": 1.393040630012118e-06, |
|
"loss": 0.0172, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.6534090909090908, |
|
"grad_norm": 0.3147240877151489, |
|
"learning_rate": 1.3700821284266352e-06, |
|
"loss": 0.0239, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.65625, |
|
"grad_norm": 0.23018233478069305, |
|
"learning_rate": 1.3473004693736037e-06, |
|
"loss": 0.026, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.6590909090909092, |
|
"grad_norm": 0.3745492696762085, |
|
"learning_rate": 1.324696119690173e-06, |
|
"loss": 0.0336, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.6619318181818183, |
|
"grad_norm": 0.1641959398984909, |
|
"learning_rate": 1.302269542580109e-06, |
|
"loss": 0.0157, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.6647727272727273, |
|
"grad_norm": 0.19555360078811646, |
|
"learning_rate": 1.2800211976042942e-06, |
|
"loss": 0.0213, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.6676136363636362, |
|
"grad_norm": 0.3400610685348511, |
|
"learning_rate": 1.2579515406713194e-06, |
|
"loss": 0.0429, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.6704545454545454, |
|
"grad_norm": 0.18559320271015167, |
|
"learning_rate": 1.236061024028129e-06, |
|
"loss": 0.0164, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.6732954545454546, |
|
"grad_norm": 0.23671524226665497, |
|
"learning_rate": 1.214350096250767e-06, |
|
"loss": 0.0258, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.6761363636363638, |
|
"grad_norm": 0.1904187798500061, |
|
"learning_rate": 1.192819202235178e-06, |
|
"loss": 0.0202, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.6789772727272727, |
|
"grad_norm": 0.23071539402008057, |
|
"learning_rate": 1.1714687831880867e-06, |
|
"loss": 0.0231, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.6818181818181817, |
|
"grad_norm": 0.25730565190315247, |
|
"learning_rate": 1.1502992766179666e-06, |
|
"loss": 0.0338, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.6846590909090908, |
|
"grad_norm": 0.6486604809761047, |
|
"learning_rate": 1.1293111163260639e-06, |
|
"loss": 0.0943, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.6875, |
|
"grad_norm": 0.29426586627960205, |
|
"learning_rate": 1.1085047323975173e-06, |
|
"loss": 0.043, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.6903409090909092, |
|
"grad_norm": 0.25408247113227844, |
|
"learning_rate": 1.0878805511925438e-06, |
|
"loss": 0.0303, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.6931818181818183, |
|
"grad_norm": 0.372081458568573, |
|
"learning_rate": 1.0674389953376928e-06, |
|
"loss": 0.033, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.6960227272727273, |
|
"grad_norm": 0.17783237993717194, |
|
"learning_rate": 1.0471804837171916e-06, |
|
"loss": 0.0124, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.6988636363636362, |
|
"grad_norm": 0.35536065697669983, |
|
"learning_rate": 1.027105431464368e-06, |
|
"loss": 0.0435, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.7017045454545454, |
|
"grad_norm": 0.2801659405231476, |
|
"learning_rate": 1.0072142499531346e-06, |
|
"loss": 0.0329, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.7045454545454546, |
|
"grad_norm": 0.1209142804145813, |
|
"learning_rate": 9.875073467895635e-07, |
|
"loss": 0.011, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.7073863636363638, |
|
"grad_norm": 0.14608953893184662, |
|
"learning_rate": 9.679851258035277e-07, |
|
"loss": 0.0123, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.7102272727272727, |
|
"grad_norm": 0.17487914860248566, |
|
"learning_rate": 9.48647987040433e-07, |
|
"loss": 0.0187, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.7130681818181817, |
|
"grad_norm": 0.2767109274864197, |
|
"learning_rate": 9.294963267530177e-07, |
|
"loss": 0.0359, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.7159090909090908, |
|
"grad_norm": 0.28444820642471313, |
|
"learning_rate": 9.105305373932338e-07, |
|
"loss": 0.0586, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.71875, |
|
"grad_norm": 0.5057897567749023, |
|
"learning_rate": 8.917510076042058e-07, |
|
"loss": 0.0459, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.7215909090909092, |
|
"grad_norm": 0.33112823963165283, |
|
"learning_rate": 8.731581222122587e-07, |
|
"loss": 0.0414, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.7244318181818183, |
|
"grad_norm": 0.22497576475143433, |
|
"learning_rate": 8.547522622190385e-07, |
|
"loss": 0.0259, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 0.18142753839492798, |
|
"learning_rate": 8.365338047937122e-07, |
|
"loss": 0.0166, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.7301136363636362, |
|
"grad_norm": 0.18240651488304138, |
|
"learning_rate": 8.185031232652252e-07, |
|
"loss": 0.0176, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.7329545454545454, |
|
"grad_norm": 0.4496082365512848, |
|
"learning_rate": 8.006605871146578e-07, |
|
"loss": 0.0361, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.7357954545454546, |
|
"grad_norm": 0.3641889989376068, |
|
"learning_rate": 7.830065619676519e-07, |
|
"loss": 0.0339, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.7357954545454546, |
|
"eval_loss": 0.046951260417699814, |
|
"eval_runtime": 322.0399, |
|
"eval_samples_per_second": 8.369, |
|
"eval_steps_per_second": 1.046, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.7386363636363638, |
|
"grad_norm": 0.4126671850681305, |
|
"learning_rate": 7.65541409586924e-07, |
|
"loss": 0.0422, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.7414772727272727, |
|
"grad_norm": 0.3763576149940491, |
|
"learning_rate": 7.482654878648465e-07, |
|
"loss": 0.0417, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.7443181818181817, |
|
"grad_norm": 0.25049203634262085, |
|
"learning_rate": 7.311791508161159e-07, |
|
"loss": 0.0264, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.7471590909090908, |
|
"grad_norm": 0.19837282598018646, |
|
"learning_rate": 7.142827485704951e-07, |
|
"loss": 0.022, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.5346717238426208, |
|
"learning_rate": 6.975766273656425e-07, |
|
"loss": 0.0461, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.7528409090909092, |
|
"grad_norm": 0.5660947561264038, |
|
"learning_rate": 6.810611295400171e-07, |
|
"loss": 0.0568, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.7556818181818183, |
|
"grad_norm": 0.14554603397846222, |
|
"learning_rate": 6.647365935258642e-07, |
|
"loss": 0.0158, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.7585227272727273, |
|
"grad_norm": 0.48322027921676636, |
|
"learning_rate": 6.48603353842272e-07, |
|
"loss": 0.048, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.7613636363636362, |
|
"grad_norm": 0.49817270040512085, |
|
"learning_rate": 6.326617410883296e-07, |
|
"loss": 0.0823, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.7642045454545454, |
|
"grad_norm": 0.41815218329429626, |
|
"learning_rate": 6.169120819363406e-07, |
|
"loss": 0.0629, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.7670454545454546, |
|
"grad_norm": 0.1850947141647339, |
|
"learning_rate": 6.013546991251373e-07, |
|
"loss": 0.0185, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.7698863636363638, |
|
"grad_norm": 0.5910592079162598, |
|
"learning_rate": 5.859899114534662e-07, |
|
"loss": 0.0857, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.7727272727272727, |
|
"grad_norm": 0.35453927516937256, |
|
"learning_rate": 5.708180337734448e-07, |
|
"loss": 0.0308, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.7755681818181817, |
|
"grad_norm": 0.19171951711177826, |
|
"learning_rate": 5.558393769841286e-07, |
|
"loss": 0.0539, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.7784090909090908, |
|
"grad_norm": 0.2592536211013794, |
|
"learning_rate": 5.410542480251202e-07, |
|
"loss": 0.0207, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.78125, |
|
"grad_norm": 0.2815329432487488, |
|
"learning_rate": 5.264629498702966e-07, |
|
"loss": 0.0326, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.7840909090909092, |
|
"grad_norm": 0.14800816774368286, |
|
"learning_rate": 5.12065781521588e-07, |
|
"loss": 0.0153, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.7869318181818183, |
|
"grad_norm": 0.14742711186408997, |
|
"learning_rate": 4.978630380028582e-07, |
|
"loss": 0.0176, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.7897727272727273, |
|
"grad_norm": 0.5278242826461792, |
|
"learning_rate": 4.838550103538575e-07, |
|
"loss": 0.0788, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.7926136363636362, |
|
"grad_norm": 0.18585218489170074, |
|
"learning_rate": 4.700419856242555e-07, |
|
"loss": 0.015, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.7954545454545454, |
|
"grad_norm": 0.16984280943870544, |
|
"learning_rate": 4.5642424686776154e-07, |
|
"loss": 0.0184, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.7982954545454546, |
|
"grad_norm": 0.18244123458862305, |
|
"learning_rate": 4.4300207313632713e-07, |
|
"loss": 0.0185, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.8011363636363638, |
|
"grad_norm": 0.2502816915512085, |
|
"learning_rate": 4.2977573947442175e-07, |
|
"loss": 0.0229, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.8039772727272727, |
|
"grad_norm": 0.22571499645709991, |
|
"learning_rate": 4.167455169134027e-07, |
|
"loss": 0.0237, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.8068181818181817, |
|
"grad_norm": 0.33113282918930054, |
|
"learning_rate": 4.039116724659564e-07, |
|
"loss": 0.0334, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.8096590909090908, |
|
"grad_norm": 0.29996100068092346, |
|
"learning_rate": 3.9127446912062606e-07, |
|
"loss": 0.0295, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.8125, |
|
"grad_norm": 0.26119595766067505, |
|
"learning_rate": 3.788341658364314e-07, |
|
"loss": 0.0313, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.8153409090909092, |
|
"grad_norm": 0.2088990956544876, |
|
"learning_rate": 3.6659101753754975e-07, |
|
"loss": 0.0286, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.23757684230804443, |
|
"learning_rate": 3.5454527510810355e-07, |
|
"loss": 0.0249, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.8210227272727273, |
|
"grad_norm": 0.1616797298192978, |
|
"learning_rate": 3.426971853870109e-07, |
|
"loss": 0.0129, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.8238636363636362, |
|
"grad_norm": 0.21369072794914246, |
|
"learning_rate": 3.3104699116292883e-07, |
|
"loss": 0.0254, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.8267045454545454, |
|
"grad_norm": 0.3602796196937561, |
|
"learning_rate": 3.1959493116928473e-07, |
|
"loss": 0.0458, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.8295454545454546, |
|
"grad_norm": 0.2149425595998764, |
|
"learning_rate": 3.0834124007937616e-07, |
|
"loss": 0.0231, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.8323863636363638, |
|
"grad_norm": 0.16421711444854736, |
|
"learning_rate": 2.972861485015666e-07, |
|
"loss": 0.0139, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.8352272727272727, |
|
"grad_norm": 0.45803770422935486, |
|
"learning_rate": 2.864298829745571e-07, |
|
"loss": 0.0787, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.8380681818181817, |
|
"grad_norm": 0.5590444207191467, |
|
"learning_rate": 2.7577266596274577e-07, |
|
"loss": 0.0367, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.8409090909090908, |
|
"grad_norm": 0.7343291640281677, |
|
"learning_rate": 2.6531471585167e-07, |
|
"loss": 0.1223, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.84375, |
|
"grad_norm": 0.23800815641880035, |
|
"learning_rate": 2.5505624694353027e-07, |
|
"loss": 0.0217, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.8465909090909092, |
|
"grad_norm": 0.2187035083770752, |
|
"learning_rate": 2.4499746945279566e-07, |
|
"loss": 0.0202, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.8494318181818183, |
|
"grad_norm": 0.4073978364467621, |
|
"learning_rate": 2.3513858950190206e-07, |
|
"loss": 0.0514, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.8522727272727273, |
|
"grad_norm": 0.17420588433742523, |
|
"learning_rate": 2.2547980911702406e-07, |
|
"loss": 0.021, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.8551136363636362, |
|
"grad_norm": 0.2199344038963318, |
|
"learning_rate": 2.1602132622393745e-07, |
|
"loss": 0.0258, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.8579545454545454, |
|
"grad_norm": 0.1626044064760208, |
|
"learning_rate": 2.0676333464396125e-07, |
|
"loss": 0.0184, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.8607954545454546, |
|
"grad_norm": 0.20894934237003326, |
|
"learning_rate": 1.9770602408998642e-07, |
|
"loss": 0.0318, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.8636363636363638, |
|
"grad_norm": 0.11425574123859406, |
|
"learning_rate": 1.8884958016259114e-07, |
|
"loss": 0.009, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.8664772727272727, |
|
"grad_norm": 0.13908714056015015, |
|
"learning_rate": 1.8019418434623405e-07, |
|
"loss": 0.0117, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.8693181818181817, |
|
"grad_norm": 0.30832770466804504, |
|
"learning_rate": 1.7174001400553586e-07, |
|
"loss": 0.0376, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.8721590909090908, |
|
"grad_norm": 0.10906349122524261, |
|
"learning_rate": 1.6348724238164583e-07, |
|
"loss": 0.009, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.875, |
|
"grad_norm": 0.20455297827720642, |
|
"learning_rate": 1.5543603858869216e-07, |
|
"loss": 0.0193, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.8778409090909092, |
|
"grad_norm": 0.09518078714609146, |
|
"learning_rate": 1.475865676103161e-07, |
|
"loss": 0.0081, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.8806818181818183, |
|
"grad_norm": 0.08790381997823715, |
|
"learning_rate": 1.3993899029629e-07, |
|
"loss": 0.0072, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.8835227272727273, |
|
"grad_norm": 0.27762630581855774, |
|
"learning_rate": 1.324934633592201e-07, |
|
"loss": 0.0292, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.8863636363636362, |
|
"grad_norm": 0.16567471623420715, |
|
"learning_rate": 1.2525013937134122e-07, |
|
"loss": 0.016, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.8892045454545454, |
|
"grad_norm": 0.49977800250053406, |
|
"learning_rate": 1.1820916676138384e-07, |
|
"loss": 0.0452, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.8920454545454546, |
|
"grad_norm": 0.2329680621623993, |
|
"learning_rate": 1.1137068981153632e-07, |
|
"loss": 0.0233, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.8948863636363638, |
|
"grad_norm": 0.322808176279068, |
|
"learning_rate": 1.0473484865448524e-07, |
|
"loss": 0.0469, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.8977272727272727, |
|
"grad_norm": 0.2716865837574005, |
|
"learning_rate": 9.830177927054429e-08, |
|
"loss": 0.0377, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.9005681818181817, |
|
"grad_norm": 0.14299501478672028, |
|
"learning_rate": 9.207161348487315e-08, |
|
"loss": 0.0178, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.9034090909090908, |
|
"grad_norm": 0.23493549227714539, |
|
"learning_rate": 8.604447896476853e-08, |
|
"loss": 0.0241, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.90625, |
|
"grad_norm": 0.2541610896587372, |
|
"learning_rate": 8.0220499217053e-08, |
|
"loss": 0.033, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 0.207777202129364, |
|
"learning_rate": 7.459979358554248e-08, |
|
"loss": 0.0328, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.9119318181818183, |
|
"grad_norm": 0.2648699879646301, |
|
"learning_rate": 6.918247724859939e-08, |
|
"loss": 0.0371, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.9147727272727273, |
|
"grad_norm": 0.4931396245956421, |
|
"learning_rate": 6.396866121677558e-08, |
|
"loss": 0.0261, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.9176136363636362, |
|
"grad_norm": 0.46385493874549866, |
|
"learning_rate": 5.895845233053643e-08, |
|
"loss": 0.0771, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.9204545454545454, |
|
"grad_norm": 0.18477115035057068, |
|
"learning_rate": 5.415195325806699e-08, |
|
"loss": 0.0237, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.9232954545454546, |
|
"grad_norm": 0.23025956749916077, |
|
"learning_rate": 4.954926249317815e-08, |
|
"loss": 0.0298, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.9261363636363638, |
|
"grad_norm": 0.11648344993591309, |
|
"learning_rate": 4.5150474353274906e-08, |
|
"loss": 0.0093, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.9289772727272727, |
|
"grad_norm": 0.19737114012241364, |
|
"learning_rate": 4.0955678977436796e-08, |
|
"loss": 0.0248, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.9318181818181817, |
|
"grad_norm": 0.29464903473854065, |
|
"learning_rate": 3.696496232456159e-08, |
|
"loss": 0.0268, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.9346590909090908, |
|
"grad_norm": 0.16712042689323425, |
|
"learning_rate": 3.3178406171608946e-08, |
|
"loss": 0.0178, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.9375, |
|
"grad_norm": 0.1289013773202896, |
|
"learning_rate": 2.9596088111922828e-08, |
|
"loss": 0.0145, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.9403409090909092, |
|
"grad_norm": 0.2535853981971741, |
|
"learning_rate": 2.6218081553638363e-08, |
|
"loss": 0.0272, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.9431818181818183, |
|
"grad_norm": 0.18250080943107605, |
|
"learning_rate": 2.3044455718185254e-08, |
|
"loss": 0.0171, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.9460227272727273, |
|
"grad_norm": 0.4696236848831177, |
|
"learning_rate": 2.0075275638862247e-08, |
|
"loss": 0.0886, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.9488636363636362, |
|
"grad_norm": 0.22248291969299316, |
|
"learning_rate": 1.7310602159505973e-08, |
|
"loss": 0.0246, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.9517045454545454, |
|
"grad_norm": 0.21488967537879944, |
|
"learning_rate": 1.4750491933247513e-08, |
|
"loss": 0.0255, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.9545454545454546, |
|
"grad_norm": 0.19222155213356018, |
|
"learning_rate": 1.2394997421347753e-08, |
|
"loss": 0.0282, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.9573863636363638, |
|
"grad_norm": 0.2110484093427658, |
|
"learning_rate": 1.0244166892124929e-08, |
|
"loss": 0.0159, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.9602272727272727, |
|
"grad_norm": 0.3025636374950409, |
|
"learning_rate": 8.29804441996207e-09, |
|
"loss": 0.0385, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.9630681818181817, |
|
"grad_norm": 0.20144778490066528, |
|
"learning_rate": 6.556669884408839e-09, |
|
"loss": 0.0288, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.9659090909090908, |
|
"grad_norm": 0.364287406206131, |
|
"learning_rate": 5.0200789693588544e-09, |
|
"loss": 0.0404, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.96875, |
|
"grad_norm": 0.1691567450761795, |
|
"learning_rate": 3.688303162322493e-09, |
|
"loss": 0.0192, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.9715909090909092, |
|
"grad_norm": 0.41609564423561096, |
|
"learning_rate": 2.5613697537818505e-09, |
|
"loss": 0.0607, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.9744318181818183, |
|
"grad_norm": 0.3112366795539856, |
|
"learning_rate": 1.6393018366278601e-09, |
|
"loss": 0.0486, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.9772727272727273, |
|
"grad_norm": 0.2627716660499573, |
|
"learning_rate": 9.221183056895566e-10, |
|
"loss": 0.0262, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.9801136363636362, |
|
"grad_norm": 0.3606441020965576, |
|
"learning_rate": 4.0983385734660875e-10, |
|
"loss": 0.0411, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.9829545454545454, |
|
"grad_norm": 0.25090616941452026, |
|
"learning_rate": 1.0245898922844889e-10, |
|
"loss": 0.0203, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.9857954545454546, |
|
"grad_norm": 0.23430025577545166, |
|
"learning_rate": 0.0, |
|
"loss": 0.0275, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.9857954545454546, |
|
"eval_loss": 0.04643603414297104, |
|
"eval_runtime": 321.1661, |
|
"eval_samples_per_second": 8.391, |
|
"eval_steps_per_second": 1.049, |
|
"step": 704 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 704, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 176, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.4271718282860954e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |