diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,5920 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.20538317868978367, + "eval_steps": 100000000, + "global_step": 25200, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 8.15012613848348e-06, + "grad_norm": 43.29102325439453, + "learning_rate": 1.0000000000000001e-07, + "loss": 81.0533, + "step": 1 + }, + { + "epoch": 0.0002445037841545044, + "grad_norm": 42.03493881225586, + "learning_rate": 3e-06, + "loss": 70.3325, + "step": 30 + }, + { + "epoch": 0.0004890075683090088, + "grad_norm": 10.497812271118164, + "learning_rate": 6e-06, + "loss": 23.8844, + "step": 60 + }, + { + "epoch": 0.0007335113524635132, + "grad_norm": 6.692718029022217, + "learning_rate": 9e-06, + "loss": 11.7198, + "step": 90 + }, + { + "epoch": 0.0009780151366180175, + "grad_norm": 9.11446475982666, + "learning_rate": 1.2e-05, + "loss": 10.3199, + "step": 120 + }, + { + "epoch": 0.001222518920772522, + "grad_norm": 16.92243766784668, + "learning_rate": 1.5e-05, + "loss": 9.5338, + "step": 150 + }, + { + "epoch": 0.0014670227049270264, + "grad_norm": 12.929216384887695, + "learning_rate": 1.8e-05, + "loss": 8.9114, + "step": 180 + }, + { + "epoch": 0.0017115264890815308, + "grad_norm": 8.830116271972656, + "learning_rate": 2.1e-05, + "loss": 8.3867, + "step": 210 + }, + { + "epoch": 0.001956030273236035, + "grad_norm": 7.348124980926514, + "learning_rate": 2.4e-05, + "loss": 8.0113, + "step": 240 + }, + { + "epoch": 0.0022005340573905395, + "grad_norm": 12.751787185668945, + "learning_rate": 2.7000000000000002e-05, + "loss": 7.6093, + "step": 270 + }, + { + "epoch": 0.002445037841545044, + "grad_norm": 15.10444164276123, + "learning_rate": 3e-05, + "loss": 7.3484, + "step": 300 + }, + { + "epoch": 0.0026895416256995483, + "grad_norm": 6.653383731842041, + "learning_rate": 3.3e-05, + "loss": 7.2105, + "step": 330 + }, + { + "epoch": 0.0029340454098540527, + "grad_norm": 6.986039161682129, + "learning_rate": 3.6e-05, + "loss": 7.0228, + "step": 360 + }, + { + "epoch": 0.003178549194008557, + "grad_norm": 6.230088710784912, + "learning_rate": 3.9000000000000006e-05, + "loss": 6.8948, + "step": 390 + }, + { + "epoch": 0.0034230529781630616, + "grad_norm": 8.170981407165527, + "learning_rate": 4.2e-05, + "loss": 6.6965, + "step": 420 + }, + { + "epoch": 0.0036675567623175656, + "grad_norm": 5.6345930099487305, + "learning_rate": 4.5e-05, + "loss": 6.5988, + "step": 450 + }, + { + "epoch": 0.00391206054647207, + "grad_norm": 5.156513214111328, + "learning_rate": 4.8e-05, + "loss": 6.4795, + "step": 480 + }, + { + "epoch": 0.0041565643306265745, + "grad_norm": 5.4964189529418945, + "learning_rate": 4.999999990869806e-05, + "loss": 6.333, + "step": 510 + }, + { + "epoch": 0.004401068114781079, + "grad_norm": 3.67378306388855, + "learning_rate": 4.999999853916893e-05, + "loss": 6.2208, + "step": 540 + }, + { + "epoch": 0.004645571898935583, + "grad_norm": 8.507222175598145, + "learning_rate": 4.9999995526204936e-05, + "loss": 6.1097, + "step": 570 + }, + { + "epoch": 0.004890075683090088, + "grad_norm": 3.756618022918701, + "learning_rate": 4.999999086980628e-05, + "loss": 5.9886, + "step": 600 + }, + { + "epoch": 0.005134579467244592, + "grad_norm": 3.8149781227111816, + "learning_rate": 4.999998456997326e-05, + "loss": 5.8779, + "step": 630 + }, + { + "epoch": 0.005379083251399097, + 
"grad_norm": 3.840543270111084, + "learning_rate": 4.999997662670628e-05, + "loss": 5.805, + "step": 660 + }, + { + "epoch": 0.005623587035553601, + "grad_norm": 3.4208931922912598, + "learning_rate": 4.999996704000589e-05, + "loss": 5.6992, + "step": 690 + }, + { + "epoch": 0.0058680908197081055, + "grad_norm": 3.2975683212280273, + "learning_rate": 4.99999558098727e-05, + "loss": 5.6531, + "step": 720 + }, + { + "epoch": 0.00611259460386261, + "grad_norm": 4.05631160736084, + "learning_rate": 4.9999942936307445e-05, + "loss": 5.554, + "step": 750 + }, + { + "epoch": 0.006357098388017114, + "grad_norm": 3.1539864540100098, + "learning_rate": 4.9999928419310994e-05, + "loss": 5.4931, + "step": 780 + }, + { + "epoch": 0.006601602172171619, + "grad_norm": 4.811732292175293, + "learning_rate": 4.999991225888427e-05, + "loss": 5.4204, + "step": 810 + }, + { + "epoch": 0.006846105956326123, + "grad_norm": 2.9593210220336914, + "learning_rate": 4.999989445502837e-05, + "loss": 5.3687, + "step": 840 + }, + { + "epoch": 0.007090609740480627, + "grad_norm": 3.942239284515381, + "learning_rate": 4.9999875007744436e-05, + "loss": 5.3238, + "step": 870 + }, + { + "epoch": 0.007335113524635131, + "grad_norm": 2.29752254486084, + "learning_rate": 4.9999853917033756e-05, + "loss": 5.2423, + "step": 900 + }, + { + "epoch": 0.007579617308789636, + "grad_norm": 2.243770122528076, + "learning_rate": 4.999983118289773e-05, + "loss": 5.2384, + "step": 930 + }, + { + "epoch": 0.00782412109294414, + "grad_norm": 2.5572686195373535, + "learning_rate": 4.999980680533782e-05, + "loss": 5.1761, + "step": 960 + }, + { + "epoch": 0.008068624877098645, + "grad_norm": 2.4739913940429688, + "learning_rate": 4.999978078435567e-05, + "loss": 5.1215, + "step": 990 + }, + { + "epoch": 0.008313128661253149, + "grad_norm": 1.6927807331085205, + "learning_rate": 4.999975311995295e-05, + "loss": 5.1264, + "step": 1020 + }, + { + "epoch": 0.008557632445407654, + "grad_norm": 2.3166885375976562, + "learning_rate": 4.99997238121315e-05, + "loss": 5.0469, + "step": 1050 + }, + { + "epoch": 0.008802136229562158, + "grad_norm": 1.69430410861969, + "learning_rate": 4.999969286089325e-05, + "loss": 5.005, + "step": 1080 + }, + { + "epoch": 0.009046640013716663, + "grad_norm": 2.0859012603759766, + "learning_rate": 4.9999660266240235e-05, + "loss": 4.9693, + "step": 1110 + }, + { + "epoch": 0.009291143797871167, + "grad_norm": 1.7291619777679443, + "learning_rate": 4.9999626028174585e-05, + "loss": 4.9352, + "step": 1140 + }, + { + "epoch": 0.00953564758202567, + "grad_norm": 1.5966330766677856, + "learning_rate": 4.999959014669856e-05, + "loss": 4.87, + "step": 1170 + }, + { + "epoch": 0.009780151366180176, + "grad_norm": 1.9011143445968628, + "learning_rate": 4.9999552621814513e-05, + "loss": 4.8772, + "step": 1200 + }, + { + "epoch": 0.010024655150334679, + "grad_norm": 1.701004981994629, + "learning_rate": 4.9999513453524917e-05, + "loss": 4.8484, + "step": 1230 + }, + { + "epoch": 0.010269158934489184, + "grad_norm": 2.171808958053589, + "learning_rate": 4.9999472641832336e-05, + "loss": 4.8233, + "step": 1260 + }, + { + "epoch": 0.010513662718643688, + "grad_norm": 1.4896094799041748, + "learning_rate": 4.999943018673946e-05, + "loss": 4.7921, + "step": 1290 + }, + { + "epoch": 0.010758166502798193, + "grad_norm": 2.112971544265747, + "learning_rate": 4.999938608824909e-05, + "loss": 4.7908, + "step": 1320 + }, + { + "epoch": 0.011002670286952697, + "grad_norm": 1.7113062143325806, + "learning_rate": 4.999934034636411e-05, + 
"loss": 4.7186, + "step": 1350 + }, + { + "epoch": 0.011247174071107202, + "grad_norm": 2.099663019180298, + "learning_rate": 4.999929296108753e-05, + "loss": 4.7112, + "step": 1380 + }, + { + "epoch": 0.011491677855261706, + "grad_norm": 1.40205979347229, + "learning_rate": 4.9999243932422466e-05, + "loss": 4.6701, + "step": 1410 + }, + { + "epoch": 0.011736181639416211, + "grad_norm": 1.53098726272583, + "learning_rate": 4.999919326037215e-05, + "loss": 4.6786, + "step": 1440 + }, + { + "epoch": 0.011980685423570715, + "grad_norm": 1.7461609840393066, + "learning_rate": 4.99991409449399e-05, + "loss": 4.6614, + "step": 1470 + }, + { + "epoch": 0.01222518920772522, + "grad_norm": 1.6045819520950317, + "learning_rate": 4.999908698612916e-05, + "loss": 4.6233, + "step": 1500 + }, + { + "epoch": 0.012469692991879723, + "grad_norm": 1.351077675819397, + "learning_rate": 4.9999031383943486e-05, + "loss": 4.626, + "step": 1530 + }, + { + "epoch": 0.012714196776034229, + "grad_norm": 1.4363055229187012, + "learning_rate": 4.999897413838651e-05, + "loss": 4.5781, + "step": 1560 + }, + { + "epoch": 0.012958700560188732, + "grad_norm": 1.1321336030960083, + "learning_rate": 4.999891524946202e-05, + "loss": 4.5492, + "step": 1590 + }, + { + "epoch": 0.013203204344343238, + "grad_norm": 1.2755215167999268, + "learning_rate": 4.999885471717387e-05, + "loss": 4.5444, + "step": 1620 + }, + { + "epoch": 0.013447708128497741, + "grad_norm": 1.471504807472229, + "learning_rate": 4.999879254152605e-05, + "loss": 4.5247, + "step": 1650 + }, + { + "epoch": 0.013692211912652246, + "grad_norm": 1.3664900064468384, + "learning_rate": 4.999872872252265e-05, + "loss": 4.5158, + "step": 1680 + }, + { + "epoch": 0.01393671569680675, + "grad_norm": 1.360611915588379, + "learning_rate": 4.999866326016785e-05, + "loss": 4.498, + "step": 1710 + }, + { + "epoch": 0.014181219480961254, + "grad_norm": 1.43533456325531, + "learning_rate": 4.999859615446596e-05, + "loss": 4.5035, + "step": 1740 + }, + { + "epoch": 0.014425723265115759, + "grad_norm": 1.4639947414398193, + "learning_rate": 4.99985274054214e-05, + "loss": 4.4533, + "step": 1770 + }, + { + "epoch": 0.014670227049270262, + "grad_norm": 1.438585638999939, + "learning_rate": 4.999845701303868e-05, + "loss": 4.4484, + "step": 1800 + }, + { + "epoch": 0.014914730833424768, + "grad_norm": 1.1868066787719727, + "learning_rate": 4.999838497732243e-05, + "loss": 4.4516, + "step": 1830 + }, + { + "epoch": 0.015159234617579271, + "grad_norm": 1.1804672479629517, + "learning_rate": 4.999831129827739e-05, + "loss": 4.4378, + "step": 1860 + }, + { + "epoch": 0.015403738401733777, + "grad_norm": 1.127652645111084, + "learning_rate": 4.9998235975908394e-05, + "loss": 4.4, + "step": 1890 + }, + { + "epoch": 0.01564824218588828, + "grad_norm": 1.2617137432098389, + "learning_rate": 4.99981590102204e-05, + "loss": 4.3802, + "step": 1920 + }, + { + "epoch": 0.015892745970042785, + "grad_norm": 1.4249097108840942, + "learning_rate": 4.9998080401218464e-05, + "loss": 4.3689, + "step": 1950 + }, + { + "epoch": 0.01613724975419729, + "grad_norm": 1.0717036724090576, + "learning_rate": 4.999800014890777e-05, + "loss": 4.3645, + "step": 1980 + }, + { + "epoch": 0.016381753538351793, + "grad_norm": 1.414404273033142, + "learning_rate": 4.9997918253293555e-05, + "loss": 4.343, + "step": 2010 + }, + { + "epoch": 0.016626257322506298, + "grad_norm": 1.1941649913787842, + "learning_rate": 4.999783471438124e-05, + "loss": 4.3271, + "step": 2040 + }, + { + "epoch": 0.016870761106660803, + 
"grad_norm": 1.0737369060516357, + "learning_rate": 4.999774953217631e-05, + "loss": 4.3223, + "step": 2070 + }, + { + "epoch": 0.01711526489081531, + "grad_norm": 1.2367453575134277, + "learning_rate": 4.9997662706684345e-05, + "loss": 4.3106, + "step": 2100 + }, + { + "epoch": 0.01735976867496981, + "grad_norm": 1.0919674634933472, + "learning_rate": 4.999757423791107e-05, + "loss": 4.2916, + "step": 2130 + }, + { + "epoch": 0.017604272459124316, + "grad_norm": 1.2348527908325195, + "learning_rate": 4.9997484125862306e-05, + "loss": 4.2935, + "step": 2160 + }, + { + "epoch": 0.01784877624327882, + "grad_norm": 1.173093318939209, + "learning_rate": 4.999739237054395e-05, + "loss": 4.2761, + "step": 2190 + }, + { + "epoch": 0.018093280027433326, + "grad_norm": 1.1982988119125366, + "learning_rate": 4.9997298971962065e-05, + "loss": 4.2619, + "step": 2220 + }, + { + "epoch": 0.018337783811587828, + "grad_norm": 1.4401408433914185, + "learning_rate": 4.999720393012277e-05, + "loss": 4.2672, + "step": 2250 + }, + { + "epoch": 0.018582287595742333, + "grad_norm": 1.0311753749847412, + "learning_rate": 4.999710724503233e-05, + "loss": 4.2272, + "step": 2280 + }, + { + "epoch": 0.01882679137989684, + "grad_norm": 1.4242304563522339, + "learning_rate": 4.9997008916697075e-05, + "loss": 4.2415, + "step": 2310 + }, + { + "epoch": 0.01907129516405134, + "grad_norm": 1.039548397064209, + "learning_rate": 4.999690894512349e-05, + "loss": 4.2086, + "step": 2340 + }, + { + "epoch": 0.019315798948205846, + "grad_norm": 1.1213486194610596, + "learning_rate": 4.999680733031814e-05, + "loss": 4.2043, + "step": 2370 + }, + { + "epoch": 0.01956030273236035, + "grad_norm": 1.0591011047363281, + "learning_rate": 4.9996704072287716e-05, + "loss": 4.1894, + "step": 2400 + }, + { + "epoch": 0.019804806516514856, + "grad_norm": 1.0503829717636108, + "learning_rate": 4.9996599171038984e-05, + "loss": 4.184, + "step": 2430 + }, + { + "epoch": 0.020049310300669358, + "grad_norm": 1.068516492843628, + "learning_rate": 4.999649262657886e-05, + "loss": 4.1918, + "step": 2460 + }, + { + "epoch": 0.020293814084823864, + "grad_norm": 0.9620047211647034, + "learning_rate": 4.999638443891434e-05, + "loss": 4.182, + "step": 2490 + }, + { + "epoch": 0.02053831786897837, + "grad_norm": 0.9728343486785889, + "learning_rate": 4.999627460805253e-05, + "loss": 4.1746, + "step": 2520 + }, + { + "epoch": 0.020782821653132874, + "grad_norm": 0.990840494632721, + "learning_rate": 4.999616313400066e-05, + "loss": 4.1339, + "step": 2550 + }, + { + "epoch": 0.021027325437287376, + "grad_norm": 1.0678601264953613, + "learning_rate": 4.999605001676605e-05, + "loss": 4.1548, + "step": 2580 + }, + { + "epoch": 0.02127182922144188, + "grad_norm": 1.1837236881256104, + "learning_rate": 4.9995935256356144e-05, + "loss": 4.1508, + "step": 2610 + }, + { + "epoch": 0.021516333005596387, + "grad_norm": 1.0381749868392944, + "learning_rate": 4.9995818852778476e-05, + "loss": 4.0977, + "step": 2640 + }, + { + "epoch": 0.021760836789750892, + "grad_norm": 0.9649094343185425, + "learning_rate": 4.999570080604071e-05, + "loss": 4.1405, + "step": 2670 + }, + { + "epoch": 0.022005340573905394, + "grad_norm": 1.1858196258544922, + "learning_rate": 4.99955811161506e-05, + "loss": 4.1242, + "step": 2700 + }, + { + "epoch": 0.0222498443580599, + "grad_norm": 1.035134196281433, + "learning_rate": 4.9995459783116004e-05, + "loss": 4.1042, + "step": 2730 + }, + { + "epoch": 0.022494348142214404, + "grad_norm": 1.0874470472335815, + "learning_rate": 
4.999533680694493e-05, + "loss": 4.0951, + "step": 2760 + }, + { + "epoch": 0.02273885192636891, + "grad_norm": 1.5923190116882324, + "learning_rate": 4.9995212187645416e-05, + "loss": 4.1031, + "step": 2790 + }, + { + "epoch": 0.02298335571052341, + "grad_norm": 0.935354471206665, + "learning_rate": 4.9995085925225693e-05, + "loss": 4.0804, + "step": 2820 + }, + { + "epoch": 0.023227859494677917, + "grad_norm": 1.6059459447860718, + "learning_rate": 4.999495801969404e-05, + "loss": 4.0703, + "step": 2850 + }, + { + "epoch": 0.023472363278832422, + "grad_norm": 1.1116045713424683, + "learning_rate": 4.9994828471058876e-05, + "loss": 4.0579, + "step": 2880 + }, + { + "epoch": 0.023716867062986924, + "grad_norm": 1.0068392753601074, + "learning_rate": 4.9994697279328714e-05, + "loss": 4.0635, + "step": 2910 + }, + { + "epoch": 0.02396137084714143, + "grad_norm": 0.9715657234191895, + "learning_rate": 4.9994564444512176e-05, + "loss": 4.068, + "step": 2940 + }, + { + "epoch": 0.024205874631295934, + "grad_norm": 1.1550418138504028, + "learning_rate": 4.9994429966618e-05, + "loss": 4.0416, + "step": 2970 + }, + { + "epoch": 0.02445037841545044, + "grad_norm": 1.1463241577148438, + "learning_rate": 4.999429384565502e-05, + "loss": 4.0224, + "step": 3000 + }, + { + "epoch": 0.02469488219960494, + "grad_norm": 0.9614786505699158, + "learning_rate": 4.999415608163217e-05, + "loss": 4.0092, + "step": 3030 + }, + { + "epoch": 0.024939385983759447, + "grad_norm": 1.0928614139556885, + "learning_rate": 4.999401667455854e-05, + "loss": 4.0083, + "step": 3060 + }, + { + "epoch": 0.025183889767913952, + "grad_norm": 1.0884851217269897, + "learning_rate": 4.9993875624443274e-05, + "loss": 4.0251, + "step": 3090 + }, + { + "epoch": 0.025428393552068457, + "grad_norm": 1.0158635377883911, + "learning_rate": 4.9993732931295646e-05, + "loss": 3.9629, + "step": 3120 + }, + { + "epoch": 0.02567289733622296, + "grad_norm": 1.0032926797866821, + "learning_rate": 4.999358859512503e-05, + "loss": 4.0015, + "step": 3150 + }, + { + "epoch": 0.025917401120377465, + "grad_norm": 0.9303460121154785, + "learning_rate": 4.9993442615940936e-05, + "loss": 4.0066, + "step": 3180 + }, + { + "epoch": 0.02616190490453197, + "grad_norm": 1.0750280618667603, + "learning_rate": 4.999329499375292e-05, + "loss": 3.9937, + "step": 3210 + }, + { + "epoch": 0.026406408688686475, + "grad_norm": 1.2123857736587524, + "learning_rate": 4.999314572857074e-05, + "loss": 3.9891, + "step": 3240 + }, + { + "epoch": 0.026650912472840977, + "grad_norm": 0.988030731678009, + "learning_rate": 4.9992994820404174e-05, + "loss": 3.9779, + "step": 3270 + }, + { + "epoch": 0.026895416256995482, + "grad_norm": 1.0025092363357544, + "learning_rate": 4.999284226926314e-05, + "loss": 3.9684, + "step": 3300 + }, + { + "epoch": 0.027139920041149988, + "grad_norm": 0.9497402310371399, + "learning_rate": 4.999268807515768e-05, + "loss": 3.9542, + "step": 3330 + }, + { + "epoch": 0.027384423825304493, + "grad_norm": 0.9898300766944885, + "learning_rate": 4.999253223809792e-05, + "loss": 3.9462, + "step": 3360 + }, + { + "epoch": 0.027628927609458995, + "grad_norm": 0.8951054215431213, + "learning_rate": 4.999237475809411e-05, + "loss": 3.9535, + "step": 3390 + }, + { + "epoch": 0.0278734313936135, + "grad_norm": 1.1410372257232666, + "learning_rate": 4.99922156351566e-05, + "loss": 3.9701, + "step": 3420 + }, + { + "epoch": 0.028117935177768005, + "grad_norm": 0.9642576575279236, + "learning_rate": 4.999205486929586e-05, + "loss": 3.9532, + "step": 3450 + }, + { 
+ "epoch": 0.028362438961922507, + "grad_norm": 0.9304039478302002, + "learning_rate": 4.999189246052245e-05, + "loss": 3.9335, + "step": 3480 + }, + { + "epoch": 0.028606942746077012, + "grad_norm": 0.9684301614761353, + "learning_rate": 4.999172840884704e-05, + "loss": 3.9289, + "step": 3510 + }, + { + "epoch": 0.028851446530231518, + "grad_norm": 0.9566449522972107, + "learning_rate": 4.999156271428043e-05, + "loss": 3.92, + "step": 3540 + }, + { + "epoch": 0.029095950314386023, + "grad_norm": 0.9796332120895386, + "learning_rate": 4.9991395376833496e-05, + "loss": 3.9173, + "step": 3570 + }, + { + "epoch": 0.029340454098540525, + "grad_norm": 0.939052164554596, + "learning_rate": 4.999122639651725e-05, + "loss": 3.8938, + "step": 3600 + }, + { + "epoch": 0.02958495788269503, + "grad_norm": 0.8773409724235535, + "learning_rate": 4.9991055773342795e-05, + "loss": 3.9225, + "step": 3630 + }, + { + "epoch": 0.029829461666849535, + "grad_norm": 0.9443736672401428, + "learning_rate": 4.9990883507321354e-05, + "loss": 3.8889, + "step": 3660 + }, + { + "epoch": 0.03007396545100404, + "grad_norm": 0.9129781723022461, + "learning_rate": 4.999070959846424e-05, + "loss": 3.894, + "step": 3690 + }, + { + "epoch": 0.030318469235158543, + "grad_norm": 0.988441526889801, + "learning_rate": 4.999053404678289e-05, + "loss": 3.8968, + "step": 3720 + }, + { + "epoch": 0.030562973019313048, + "grad_norm": 1.0134508609771729, + "learning_rate": 4.999035685228884e-05, + "loss": 3.873, + "step": 3750 + }, + { + "epoch": 0.030807476803467553, + "grad_norm": 0.9853084087371826, + "learning_rate": 4.999017801499375e-05, + "loss": 3.8921, + "step": 3780 + }, + { + "epoch": 0.03105198058762206, + "grad_norm": 1.0263316631317139, + "learning_rate": 4.998999753490937e-05, + "loss": 3.8691, + "step": 3810 + }, + { + "epoch": 0.03129648437177656, + "grad_norm": 1.2496927976608276, + "learning_rate": 4.998981541204757e-05, + "loss": 3.8631, + "step": 3840 + }, + { + "epoch": 0.031540988155931066, + "grad_norm": 1.2647733688354492, + "learning_rate": 4.998963164642031e-05, + "loss": 3.8676, + "step": 3870 + }, + { + "epoch": 0.03178549194008557, + "grad_norm": 0.8532811403274536, + "learning_rate": 4.9989446238039676e-05, + "loss": 3.8196, + "step": 3900 + }, + { + "epoch": 0.032029995724240076, + "grad_norm": 0.9394932985305786, + "learning_rate": 4.998925918691786e-05, + "loss": 3.8464, + "step": 3930 + }, + { + "epoch": 0.03227449950839458, + "grad_norm": 0.9216808676719666, + "learning_rate": 4.998907049306715e-05, + "loss": 3.8294, + "step": 3960 + }, + { + "epoch": 0.03251900329254909, + "grad_norm": 0.9735797047615051, + "learning_rate": 4.998888015649996e-05, + "loss": 3.8412, + "step": 3990 + }, + { + "epoch": 0.032763507076703585, + "grad_norm": 0.9803655743598938, + "learning_rate": 4.99886881772288e-05, + "loss": 3.834, + "step": 4020 + }, + { + "epoch": 0.03300801086085809, + "grad_norm": 0.8677938580513, + "learning_rate": 4.998849455526628e-05, + "loss": 3.8325, + "step": 4050 + }, + { + "epoch": 0.033252514645012596, + "grad_norm": 1.0530720949172974, + "learning_rate": 4.998829929062515e-05, + "loss": 3.8125, + "step": 4080 + }, + { + "epoch": 0.0334970184291671, + "grad_norm": 0.9517835378646851, + "learning_rate": 4.998810238331822e-05, + "loss": 3.8079, + "step": 4110 + }, + { + "epoch": 0.033741522213321606, + "grad_norm": 0.9056320190429688, + "learning_rate": 4.998790383335845e-05, + "loss": 3.8023, + "step": 4140 + }, + { + "epoch": 0.03398602599747611, + "grad_norm": 1.042631983757019, + 
"learning_rate": 4.9987703640758894e-05, + "loss": 3.8089, + "step": 4170 + }, + { + "epoch": 0.03423052978163062, + "grad_norm": 1.003548502922058, + "learning_rate": 4.99875018055327e-05, + "loss": 3.8088, + "step": 4200 + }, + { + "epoch": 0.034475033565785115, + "grad_norm": 0.9754064083099365, + "learning_rate": 4.998729832769315e-05, + "loss": 3.7969, + "step": 4230 + }, + { + "epoch": 0.03471953734993962, + "grad_norm": 0.9784180521965027, + "learning_rate": 4.998709320725361e-05, + "loss": 3.8066, + "step": 4260 + }, + { + "epoch": 0.034964041134094126, + "grad_norm": 0.9522731900215149, + "learning_rate": 4.998688644422756e-05, + "loss": 3.8062, + "step": 4290 + }, + { + "epoch": 0.03520854491824863, + "grad_norm": 1.0362435579299927, + "learning_rate": 4.998667803862861e-05, + "loss": 3.8007, + "step": 4320 + }, + { + "epoch": 0.03545304870240314, + "grad_norm": 0.899608850479126, + "learning_rate": 4.9986467990470445e-05, + "loss": 3.8013, + "step": 4350 + }, + { + "epoch": 0.03569755248655764, + "grad_norm": 0.9725902676582336, + "learning_rate": 4.998625629976688e-05, + "loss": 3.7688, + "step": 4380 + }, + { + "epoch": 0.03594205627071215, + "grad_norm": 0.9904030561447144, + "learning_rate": 4.998604296653182e-05, + "loss": 3.7503, + "step": 4410 + }, + { + "epoch": 0.03618656005486665, + "grad_norm": 0.9177742004394531, + "learning_rate": 4.99858279907793e-05, + "loss": 3.7664, + "step": 4440 + }, + { + "epoch": 0.03643106383902115, + "grad_norm": 0.964356005191803, + "learning_rate": 4.998561137252346e-05, + "loss": 3.7574, + "step": 4470 + }, + { + "epoch": 0.036675567623175656, + "grad_norm": 1.0006535053253174, + "learning_rate": 4.9985393111778525e-05, + "loss": 3.7616, + "step": 4500 + }, + { + "epoch": 0.03692007140733016, + "grad_norm": 0.9334198236465454, + "learning_rate": 4.998517320855884e-05, + "loss": 3.776, + "step": 4530 + }, + { + "epoch": 0.03716457519148467, + "grad_norm": 0.9192038774490356, + "learning_rate": 4.998495166287887e-05, + "loss": 3.7552, + "step": 4560 + }, + { + "epoch": 0.03740907897563917, + "grad_norm": 0.9278284311294556, + "learning_rate": 4.998472847475318e-05, + "loss": 3.7508, + "step": 4590 + }, + { + "epoch": 0.03765358275979368, + "grad_norm": 0.870951771736145, + "learning_rate": 4.998450364419643e-05, + "loss": 3.7419, + "step": 4620 + }, + { + "epoch": 0.03789808654394818, + "grad_norm": 0.8986937403678894, + "learning_rate": 4.998427717122342e-05, + "loss": 3.7285, + "step": 4650 + }, + { + "epoch": 0.03814259032810268, + "grad_norm": 0.8922073841094971, + "learning_rate": 4.9984049055849024e-05, + "loss": 3.7252, + "step": 4680 + }, + { + "epoch": 0.038387094112257186, + "grad_norm": 0.8716844916343689, + "learning_rate": 4.9983819298088234e-05, + "loss": 3.7243, + "step": 4710 + }, + { + "epoch": 0.03863159789641169, + "grad_norm": 0.9074581861495972, + "learning_rate": 4.9983587897956166e-05, + "loss": 3.7254, + "step": 4740 + }, + { + "epoch": 0.0388761016805662, + "grad_norm": 0.8903953433036804, + "learning_rate": 4.998335485546802e-05, + "loss": 3.7363, + "step": 4770 + }, + { + "epoch": 0.0391206054647207, + "grad_norm": 1.0858160257339478, + "learning_rate": 4.998312017063912e-05, + "loss": 3.708, + "step": 4800 + }, + { + "epoch": 0.03936510924887521, + "grad_norm": 1.1008403301239014, + "learning_rate": 4.9982883843484895e-05, + "loss": 3.727, + "step": 4830 + }, + { + "epoch": 0.03960961303302971, + "grad_norm": 0.9895632863044739, + "learning_rate": 4.998264587402088e-05, + "loss": 3.7383, + "step": 4860 + }, + { 
+ "epoch": 0.03985411681718422, + "grad_norm": 1.0174016952514648, + "learning_rate": 4.9982406262262726e-05, + "loss": 3.6967, + "step": 4890 + }, + { + "epoch": 0.040098620601338716, + "grad_norm": 0.9164692163467407, + "learning_rate": 4.9982165008226175e-05, + "loss": 3.7038, + "step": 4920 + }, + { + "epoch": 0.04034312438549322, + "grad_norm": 0.8442469835281372, + "learning_rate": 4.998192211192708e-05, + "loss": 3.7045, + "step": 4950 + }, + { + "epoch": 0.04058762816964773, + "grad_norm": 0.9134944677352905, + "learning_rate": 4.9981677573381423e-05, + "loss": 3.6909, + "step": 4980 + }, + { + "epoch": 0.04083213195380223, + "grad_norm": 0.9397178292274475, + "learning_rate": 4.9981431392605274e-05, + "loss": 3.7087, + "step": 5010 + }, + { + "epoch": 0.04107663573795674, + "grad_norm": 1.1442530155181885, + "learning_rate": 4.998118356961481e-05, + "loss": 3.6804, + "step": 5040 + }, + { + "epoch": 0.04132113952211124, + "grad_norm": 0.8651236891746521, + "learning_rate": 4.998093410442632e-05, + "loss": 3.668, + "step": 5070 + }, + { + "epoch": 0.04156564330626575, + "grad_norm": 1.0404279232025146, + "learning_rate": 4.998068299705623e-05, + "loss": 3.673, + "step": 5100 + }, + { + "epoch": 0.041810147090420254, + "grad_norm": 0.9545896053314209, + "learning_rate": 4.9980430247521014e-05, + "loss": 3.6635, + "step": 5130 + }, + { + "epoch": 0.04205465087457475, + "grad_norm": 0.9344103932380676, + "learning_rate": 4.998017585583731e-05, + "loss": 3.6834, + "step": 5160 + }, + { + "epoch": 0.04229915465872926, + "grad_norm": 1.0414049625396729, + "learning_rate": 4.9979919822021824e-05, + "loss": 3.6868, + "step": 5190 + }, + { + "epoch": 0.04254365844288376, + "grad_norm": 0.9391636252403259, + "learning_rate": 4.99796621460914e-05, + "loss": 3.6671, + "step": 5220 + }, + { + "epoch": 0.04278816222703827, + "grad_norm": 0.8789961934089661, + "learning_rate": 4.9979402828062963e-05, + "loss": 3.6575, + "step": 5250 + }, + { + "epoch": 0.04303266601119277, + "grad_norm": 0.8953958749771118, + "learning_rate": 4.997914186795358e-05, + "loss": 3.6741, + "step": 5280 + }, + { + "epoch": 0.04327716979534728, + "grad_norm": 0.8470034003257751, + "learning_rate": 4.9978879265780385e-05, + "loss": 3.6517, + "step": 5310 + }, + { + "epoch": 0.043521673579501784, + "grad_norm": 0.8863801956176758, + "learning_rate": 4.997861502156066e-05, + "loss": 3.6667, + "step": 5340 + }, + { + "epoch": 0.04376617736365628, + "grad_norm": 0.9392006993293762, + "learning_rate": 4.997834913531176e-05, + "loss": 3.6278, + "step": 5370 + }, + { + "epoch": 0.04401068114781079, + "grad_norm": 0.8935637474060059, + "learning_rate": 4.9978081607051176e-05, + "loss": 3.6656, + "step": 5400 + }, + { + "epoch": 0.04425518493196529, + "grad_norm": 0.9051403403282166, + "learning_rate": 4.997781243679648e-05, + "loss": 3.6439, + "step": 5430 + }, + { + "epoch": 0.0444996887161198, + "grad_norm": 0.9147741794586182, + "learning_rate": 4.9977541624565374e-05, + "loss": 3.6362, + "step": 5460 + }, + { + "epoch": 0.0447441925002743, + "grad_norm": 0.8926962614059448, + "learning_rate": 4.9977269170375665e-05, + "loss": 3.6179, + "step": 5490 + }, + { + "epoch": 0.04498869628442881, + "grad_norm": 0.9078648686408997, + "learning_rate": 4.997699507424526e-05, + "loss": 3.6323, + "step": 5520 + }, + { + "epoch": 0.045233200068583314, + "grad_norm": 0.8844118118286133, + "learning_rate": 4.997671933619218e-05, + "loss": 3.6219, + "step": 5550 + }, + { + "epoch": 0.04547770385273782, + "grad_norm": 0.9098038077354431, + 
"learning_rate": 4.9976441956234546e-05, + "loss": 3.6341, + "step": 5580 + }, + { + "epoch": 0.04572220763689232, + "grad_norm": 0.8704332709312439, + "learning_rate": 4.99761629343906e-05, + "loss": 3.6038, + "step": 5610 + }, + { + "epoch": 0.04596671142104682, + "grad_norm": 0.9353320002555847, + "learning_rate": 4.9975882270678676e-05, + "loss": 3.6171, + "step": 5640 + }, + { + "epoch": 0.04621121520520133, + "grad_norm": 1.0025008916854858, + "learning_rate": 4.997559996511723e-05, + "loss": 3.6084, + "step": 5670 + }, + { + "epoch": 0.04645571898935583, + "grad_norm": 0.8339236974716187, + "learning_rate": 4.997531601772481e-05, + "loss": 3.6271, + "step": 5700 + }, + { + "epoch": 0.04670022277351034, + "grad_norm": 0.8991410136222839, + "learning_rate": 4.9975030428520084e-05, + "loss": 3.5897, + "step": 5730 + }, + { + "epoch": 0.046944726557664844, + "grad_norm": 0.8854517936706543, + "learning_rate": 4.997474319752184e-05, + "loss": 3.603, + "step": 5760 + }, + { + "epoch": 0.04718923034181935, + "grad_norm": 0.9445385932922363, + "learning_rate": 4.997445432474895e-05, + "loss": 3.6084, + "step": 5790 + }, + { + "epoch": 0.04743373412597385, + "grad_norm": 0.871096134185791, + "learning_rate": 4.9974163810220406e-05, + "loss": 3.5892, + "step": 5820 + }, + { + "epoch": 0.04767823791012835, + "grad_norm": 0.8828051686286926, + "learning_rate": 4.99738716539553e-05, + "loss": 3.6075, + "step": 5850 + }, + { + "epoch": 0.04792274169428286, + "grad_norm": 0.8362672328948975, + "learning_rate": 4.997357785597284e-05, + "loss": 3.5772, + "step": 5880 + }, + { + "epoch": 0.048167245478437364, + "grad_norm": 0.9075796008110046, + "learning_rate": 4.997328241629234e-05, + "loss": 3.5881, + "step": 5910 + }, + { + "epoch": 0.04841174926259187, + "grad_norm": 0.9475303888320923, + "learning_rate": 4.997298533493323e-05, + "loss": 3.5963, + "step": 5940 + }, + { + "epoch": 0.048656253046746374, + "grad_norm": 0.807908296585083, + "learning_rate": 4.997268661191503e-05, + "loss": 3.5776, + "step": 5970 + }, + { + "epoch": 0.04890075683090088, + "grad_norm": 0.9100547432899475, + "learning_rate": 4.9972386247257385e-05, + "loss": 3.5924, + "step": 6000 + }, + { + "epoch": 0.049145260615055385, + "grad_norm": 0.79295814037323, + "learning_rate": 4.9972084240980025e-05, + "loss": 3.5603, + "step": 6030 + }, + { + "epoch": 0.04938976439920988, + "grad_norm": 0.8548495173454285, + "learning_rate": 4.997178059310281e-05, + "loss": 3.5624, + "step": 6060 + }, + { + "epoch": 0.04963426818336439, + "grad_norm": 0.8862835168838501, + "learning_rate": 4.997147530364571e-05, + "loss": 3.5738, + "step": 6090 + }, + { + "epoch": 0.049878771967518894, + "grad_norm": 1.0974136590957642, + "learning_rate": 4.9971168372628793e-05, + "loss": 3.5549, + "step": 6120 + }, + { + "epoch": 0.0501232757516734, + "grad_norm": 0.9224910140037537, + "learning_rate": 4.997085980007222e-05, + "loss": 3.5632, + "step": 6150 + }, + { + "epoch": 0.050367779535827904, + "grad_norm": 0.8416465520858765, + "learning_rate": 4.99705495859963e-05, + "loss": 3.5605, + "step": 6180 + }, + { + "epoch": 0.05061228331998241, + "grad_norm": 0.9750398993492126, + "learning_rate": 4.99702377304214e-05, + "loss": 3.5652, + "step": 6210 + }, + { + "epoch": 0.050856787104136915, + "grad_norm": 1.0585671663284302, + "learning_rate": 4.9969924233368036e-05, + "loss": 3.5605, + "step": 6240 + }, + { + "epoch": 0.05110129088829142, + "grad_norm": 1.0180178880691528, + "learning_rate": 4.996960909485681e-05, + "loss": 3.5543, + "step": 6270 + 
}, + { + "epoch": 0.05134579467244592, + "grad_norm": 0.8081057667732239, + "learning_rate": 4.9969292314908446e-05, + "loss": 3.5551, + "step": 6300 + }, + { + "epoch": 0.051590298456600424, + "grad_norm": 0.8196061253547668, + "learning_rate": 4.996897389354376e-05, + "loss": 3.5566, + "step": 6330 + }, + { + "epoch": 0.05183480224075493, + "grad_norm": 0.9229000806808472, + "learning_rate": 4.9968653830783684e-05, + "loss": 3.5439, + "step": 6360 + }, + { + "epoch": 0.052079306024909434, + "grad_norm": 0.8422220945358276, + "learning_rate": 4.996833212664927e-05, + "loss": 3.5413, + "step": 6390 + }, + { + "epoch": 0.05232380980906394, + "grad_norm": 0.9290680885314941, + "learning_rate": 4.996800878116166e-05, + "loss": 3.5245, + "step": 6420 + }, + { + "epoch": 0.052568313593218445, + "grad_norm": 0.7990279793739319, + "learning_rate": 4.99676837943421e-05, + "loss": 3.5519, + "step": 6450 + }, + { + "epoch": 0.05281281737737295, + "grad_norm": 0.8968884348869324, + "learning_rate": 4.996735716621196e-05, + "loss": 3.5306, + "step": 6480 + }, + { + "epoch": 0.05305732116152745, + "grad_norm": 0.8280816674232483, + "learning_rate": 4.996702889679272e-05, + "loss": 3.5407, + "step": 6510 + }, + { + "epoch": 0.053301824945681954, + "grad_norm": 0.9637565612792969, + "learning_rate": 4.996669898610595e-05, + "loss": 3.5361, + "step": 6540 + }, + { + "epoch": 0.05354632872983646, + "grad_norm": 0.8355570435523987, + "learning_rate": 4.996636743417334e-05, + "loss": 3.5215, + "step": 6570 + }, + { + "epoch": 0.053790832513990965, + "grad_norm": 0.7987068295478821, + "learning_rate": 4.996603424101669e-05, + "loss": 3.5135, + "step": 6600 + }, + { + "epoch": 0.05403533629814547, + "grad_norm": 0.8417894840240479, + "learning_rate": 4.996569940665789e-05, + "loss": 3.5254, + "step": 6630 + }, + { + "epoch": 0.054279840082299975, + "grad_norm": 0.8814358115196228, + "learning_rate": 4.996536293111896e-05, + "loss": 3.5091, + "step": 6660 + }, + { + "epoch": 0.05452434386645448, + "grad_norm": 0.8625611662864685, + "learning_rate": 4.996502481442202e-05, + "loss": 3.5207, + "step": 6690 + }, + { + "epoch": 0.054768847650608986, + "grad_norm": 0.8265979886054993, + "learning_rate": 4.9964685056589314e-05, + "loss": 3.5011, + "step": 6720 + }, + { + "epoch": 0.055013351434763484, + "grad_norm": 0.8092629313468933, + "learning_rate": 4.996434365764314e-05, + "loss": 3.4897, + "step": 6750 + }, + { + "epoch": 0.05525785521891799, + "grad_norm": 0.917871356010437, + "learning_rate": 4.996400061760597e-05, + "loss": 3.5188, + "step": 6780 + }, + { + "epoch": 0.055502359003072495, + "grad_norm": 0.8477138876914978, + "learning_rate": 4.996365593650033e-05, + "loss": 3.5142, + "step": 6810 + }, + { + "epoch": 0.055746862787227, + "grad_norm": 0.7970532178878784, + "learning_rate": 4.99633096143489e-05, + "loss": 3.5112, + "step": 6840 + }, + { + "epoch": 0.055991366571381505, + "grad_norm": 0.8973976969718933, + "learning_rate": 4.9962961651174436e-05, + "loss": 3.5156, + "step": 6870 + }, + { + "epoch": 0.05623587035553601, + "grad_norm": 0.962066113948822, + "learning_rate": 4.9962612046999827e-05, + "loss": 3.4848, + "step": 6900 + }, + { + "epoch": 0.056480374139690516, + "grad_norm": 0.9020524621009827, + "learning_rate": 4.996226080184803e-05, + "loss": 3.503, + "step": 6930 + }, + { + "epoch": 0.056724877923845014, + "grad_norm": 0.8064045906066895, + "learning_rate": 4.996190791574215e-05, + "loss": 3.4982, + "step": 6960 + }, + { + "epoch": 0.05696938170799952, + "grad_norm": 
0.8507282733917236, + "learning_rate": 4.996155338870538e-05, + "loss": 3.4713, + "step": 6990 + }, + { + "epoch": 0.057213885492154025, + "grad_norm": 0.8574703931808472, + "learning_rate": 4.9961197220761035e-05, + "loss": 3.4924, + "step": 7020 + }, + { + "epoch": 0.05745838927630853, + "grad_norm": 0.8214017152786255, + "learning_rate": 4.996083941193252e-05, + "loss": 3.4773, + "step": 7050 + }, + { + "epoch": 0.057702893060463036, + "grad_norm": 0.8267714977264404, + "learning_rate": 4.9960479962243367e-05, + "loss": 3.4836, + "step": 7080 + }, + { + "epoch": 0.05794739684461754, + "grad_norm": 0.8415769934654236, + "learning_rate": 4.996011887171719e-05, + "loss": 3.4871, + "step": 7110 + }, + { + "epoch": 0.058191900628772046, + "grad_norm": 0.9093459248542786, + "learning_rate": 4.995975614037773e-05, + "loss": 3.4777, + "step": 7140 + }, + { + "epoch": 0.05843640441292655, + "grad_norm": 0.865168571472168, + "learning_rate": 4.995939176824883e-05, + "loss": 3.4798, + "step": 7170 + }, + { + "epoch": 0.05868090819708105, + "grad_norm": 0.8812352418899536, + "learning_rate": 4.995902575535446e-05, + "loss": 3.4634, + "step": 7200 + }, + { + "epoch": 0.058925411981235555, + "grad_norm": 0.8318623900413513, + "learning_rate": 4.995865810171866e-05, + "loss": 3.4691, + "step": 7230 + }, + { + "epoch": 0.05916991576539006, + "grad_norm": 0.8475897312164307, + "learning_rate": 4.995828880736561e-05, + "loss": 3.4894, + "step": 7260 + }, + { + "epoch": 0.059414419549544566, + "grad_norm": 0.8317278027534485, + "learning_rate": 4.995791787231958e-05, + "loss": 3.4597, + "step": 7290 + }, + { + "epoch": 0.05965892333369907, + "grad_norm": 0.8189125657081604, + "learning_rate": 4.9957545296604965e-05, + "loss": 3.4495, + "step": 7320 + }, + { + "epoch": 0.059903427117853576, + "grad_norm": 0.8047211766242981, + "learning_rate": 4.9957171080246245e-05, + "loss": 3.4865, + "step": 7350 + }, + { + "epoch": 0.06014793090200808, + "grad_norm": 0.8353013396263123, + "learning_rate": 4.995679522326803e-05, + "loss": 3.4671, + "step": 7380 + }, + { + "epoch": 0.06039243468616259, + "grad_norm": 0.7747114300727844, + "learning_rate": 4.995641772569502e-05, + "loss": 3.4423, + "step": 7410 + }, + { + "epoch": 0.060636938470317085, + "grad_norm": 0.792785108089447, + "learning_rate": 4.995603858755203e-05, + "loss": 3.448, + "step": 7440 + }, + { + "epoch": 0.06088144225447159, + "grad_norm": 0.9009000658988953, + "learning_rate": 4.9955657808863985e-05, + "loss": 3.4471, + "step": 7470 + }, + { + "epoch": 0.061125946038626096, + "grad_norm": 0.8719607591629028, + "learning_rate": 4.995527538965593e-05, + "loss": 3.4512, + "step": 7500 + }, + { + "epoch": 0.0613704498227806, + "grad_norm": 0.840308666229248, + "learning_rate": 4.995489132995298e-05, + "loss": 3.4576, + "step": 7530 + }, + { + "epoch": 0.061614953606935106, + "grad_norm": 0.8216726779937744, + "learning_rate": 4.99545056297804e-05, + "loss": 3.4369, + "step": 7560 + }, + { + "epoch": 0.06185945739108961, + "grad_norm": 0.7841954827308655, + "learning_rate": 4.995411828916354e-05, + "loss": 3.4338, + "step": 7590 + }, + { + "epoch": 0.06210396117524412, + "grad_norm": 0.7832449674606323, + "learning_rate": 4.9953729308127874e-05, + "loss": 3.431, + "step": 7620 + }, + { + "epoch": 0.062348464959398615, + "grad_norm": 0.821207582950592, + "learning_rate": 4.995333868669895e-05, + "loss": 3.44, + "step": 7650 + }, + { + "epoch": 0.06259296874355312, + "grad_norm": 0.7705381512641907, + "learning_rate": 4.995294642490246e-05, + "loss": 
3.4479, + "step": 7680 + }, + { + "epoch": 0.06283747252770763, + "grad_norm": 0.7850989699363708, + "learning_rate": 4.995255252276418e-05, + "loss": 3.4276, + "step": 7710 + }, + { + "epoch": 0.06308197631186213, + "grad_norm": 0.7798038721084595, + "learning_rate": 4.9952156980310016e-05, + "loss": 3.4376, + "step": 7740 + }, + { + "epoch": 0.06332648009601663, + "grad_norm": 0.8357362151145935, + "learning_rate": 4.9951759797565965e-05, + "loss": 3.4408, + "step": 7770 + }, + { + "epoch": 0.06357098388017114, + "grad_norm": 0.8627969622612, + "learning_rate": 4.995136097455815e-05, + "loss": 3.4328, + "step": 7800 + }, + { + "epoch": 0.06381548766432564, + "grad_norm": 0.7546987533569336, + "learning_rate": 4.995096051131276e-05, + "loss": 3.4168, + "step": 7830 + }, + { + "epoch": 0.06405999144848015, + "grad_norm": 0.8405272960662842, + "learning_rate": 4.995055840785614e-05, + "loss": 3.402, + "step": 7860 + }, + { + "epoch": 0.06430449523263465, + "grad_norm": 0.8333033919334412, + "learning_rate": 4.995015466421473e-05, + "loss": 3.4333, + "step": 7890 + }, + { + "epoch": 0.06454899901678916, + "grad_norm": 0.787031352519989, + "learning_rate": 4.9949749280415056e-05, + "loss": 3.4065, + "step": 7920 + }, + { + "epoch": 0.06479350280094366, + "grad_norm": 0.7898181676864624, + "learning_rate": 4.9949342256483766e-05, + "loss": 3.4023, + "step": 7950 + }, + { + "epoch": 0.06503800658509817, + "grad_norm": 0.7997327446937561, + "learning_rate": 4.9948933592447636e-05, + "loss": 3.4268, + "step": 7980 + }, + { + "epoch": 0.06528251036925267, + "grad_norm": 0.8030378818511963, + "learning_rate": 4.9948523288333506e-05, + "loss": 3.4106, + "step": 8010 + }, + { + "epoch": 0.06552701415340717, + "grad_norm": 0.8793920278549194, + "learning_rate": 4.994811134416836e-05, + "loss": 3.4075, + "step": 8040 + }, + { + "epoch": 0.06577151793756168, + "grad_norm": 0.810756504535675, + "learning_rate": 4.994769775997927e-05, + "loss": 3.4177, + "step": 8070 + }, + { + "epoch": 0.06601602172171618, + "grad_norm": 0.8193936347961426, + "learning_rate": 4.994728253579345e-05, + "loss": 3.4195, + "step": 8100 + }, + { + "epoch": 0.0662605255058707, + "grad_norm": 0.8704981803894043, + "learning_rate": 4.9946865671638166e-05, + "loss": 3.3981, + "step": 8130 + }, + { + "epoch": 0.06650502929002519, + "grad_norm": 0.9237441420555115, + "learning_rate": 4.9946447167540835e-05, + "loss": 3.4177, + "step": 8160 + }, + { + "epoch": 0.0667495330741797, + "grad_norm": 0.7941862344741821, + "learning_rate": 4.994602702352896e-05, + "loss": 3.4252, + "step": 8190 + }, + { + "epoch": 0.0669940368583342, + "grad_norm": 0.8230422735214233, + "learning_rate": 4.994560523963018e-05, + "loss": 3.3915, + "step": 8220 + }, + { + "epoch": 0.0672385406424887, + "grad_norm": 0.7928892970085144, + "learning_rate": 4.9945181815872196e-05, + "loss": 3.3738, + "step": 8250 + }, + { + "epoch": 0.06748304442664321, + "grad_norm": 0.798755407333374, + "learning_rate": 4.9944756752282855e-05, + "loss": 3.3991, + "step": 8280 + }, + { + "epoch": 0.06772754821079771, + "grad_norm": 0.744189977645874, + "learning_rate": 4.994433004889011e-05, + "loss": 3.3874, + "step": 8310 + }, + { + "epoch": 0.06797205199495222, + "grad_norm": 0.7570254802703857, + "learning_rate": 4.994390170572199e-05, + "loss": 3.3991, + "step": 8340 + }, + { + "epoch": 0.06821655577910672, + "grad_norm": 0.7630516886711121, + "learning_rate": 4.994347172280667e-05, + "loss": 3.38, + "step": 8370 + }, + { + "epoch": 0.06846105956326123, + "grad_norm": 
0.8707906603813171, + "learning_rate": 4.994304010017241e-05, + "loss": 3.3794, + "step": 8400 + }, + { + "epoch": 0.06870556334741573, + "grad_norm": 0.7646734714508057, + "learning_rate": 4.994260683784758e-05, + "loss": 3.4037, + "step": 8430 + }, + { + "epoch": 0.06895006713157023, + "grad_norm": 0.8288954496383667, + "learning_rate": 4.9942171935860674e-05, + "loss": 3.3847, + "step": 8460 + }, + { + "epoch": 0.06919457091572474, + "grad_norm": 0.7819532752037048, + "learning_rate": 4.994173539424026e-05, + "loss": 3.3854, + "step": 8490 + }, + { + "epoch": 0.06943907469987924, + "grad_norm": 0.7958394289016724, + "learning_rate": 4.994129721301506e-05, + "loss": 3.3937, + "step": 8520 + }, + { + "epoch": 0.06968357848403375, + "grad_norm": 0.8431640267372131, + "learning_rate": 4.994085739221386e-05, + "loss": 3.4072, + "step": 8550 + }, + { + "epoch": 0.06992808226818825, + "grad_norm": 0.7959804534912109, + "learning_rate": 4.994041593186558e-05, + "loss": 3.3662, + "step": 8580 + }, + { + "epoch": 0.07017258605234276, + "grad_norm": 0.7684959769248962, + "learning_rate": 4.993997283199924e-05, + "loss": 3.395, + "step": 8610 + }, + { + "epoch": 0.07041708983649726, + "grad_norm": 0.7869526147842407, + "learning_rate": 4.993952809264397e-05, + "loss": 3.3903, + "step": 8640 + }, + { + "epoch": 0.07066159362065176, + "grad_norm": 0.8075766563415527, + "learning_rate": 4.9939081713829006e-05, + "loss": 3.3732, + "step": 8670 + }, + { + "epoch": 0.07090609740480627, + "grad_norm": 0.7754388451576233, + "learning_rate": 4.993863369558369e-05, + "loss": 3.3836, + "step": 8700 + }, + { + "epoch": 0.07115060118896077, + "grad_norm": 0.7476597428321838, + "learning_rate": 4.9938184037937466e-05, + "loss": 3.3767, + "step": 8730 + }, + { + "epoch": 0.07139510497311528, + "grad_norm": 0.7789965271949768, + "learning_rate": 4.993773274091991e-05, + "loss": 3.3518, + "step": 8760 + }, + { + "epoch": 0.07163960875726978, + "grad_norm": 0.8229824900627136, + "learning_rate": 4.993727980456067e-05, + "loss": 3.3629, + "step": 8790 + }, + { + "epoch": 0.0718841125414243, + "grad_norm": 0.762668251991272, + "learning_rate": 4.993682522888954e-05, + "loss": 3.375, + "step": 8820 + }, + { + "epoch": 0.07212861632557879, + "grad_norm": 0.8019202947616577, + "learning_rate": 4.993636901393639e-05, + "loss": 3.3635, + "step": 8850 + }, + { + "epoch": 0.0723731201097333, + "grad_norm": 0.9177828431129456, + "learning_rate": 4.993591115973121e-05, + "loss": 3.37, + "step": 8880 + }, + { + "epoch": 0.0726176238938878, + "grad_norm": 0.8312643766403198, + "learning_rate": 4.9935451666304105e-05, + "loss": 3.3763, + "step": 8910 + }, + { + "epoch": 0.0728621276780423, + "grad_norm": 0.828384518623352, + "learning_rate": 4.993499053368528e-05, + "loss": 3.368, + "step": 8940 + }, + { + "epoch": 0.07310663146219681, + "grad_norm": 1.0234721899032593, + "learning_rate": 4.993452776190504e-05, + "loss": 3.3534, + "step": 8970 + }, + { + "epoch": 0.07335113524635131, + "grad_norm": 0.8042975068092346, + "learning_rate": 4.993406335099382e-05, + "loss": 3.3677, + "step": 9000 + }, + { + "epoch": 0.07359563903050582, + "grad_norm": 0.753710150718689, + "learning_rate": 4.993359730098214e-05, + "loss": 3.3652, + "step": 9030 + }, + { + "epoch": 0.07384014281466032, + "grad_norm": 0.8520479798316956, + "learning_rate": 4.993312961190064e-05, + "loss": 3.3511, + "step": 9060 + }, + { + "epoch": 0.07408464659881484, + "grad_norm": 0.7559372186660767, + "learning_rate": 4.993266028378006e-05, + "loss": 3.3438, + "step": 
9090 + }, + { + "epoch": 0.07432915038296933, + "grad_norm": 0.7419788837432861, + "learning_rate": 4.993218931665126e-05, + "loss": 3.3453, + "step": 9120 + }, + { + "epoch": 0.07457365416712383, + "grad_norm": 0.8156657218933105, + "learning_rate": 4.993171671054519e-05, + "loss": 3.3654, + "step": 9150 + }, + { + "epoch": 0.07481815795127834, + "grad_norm": 0.8189958930015564, + "learning_rate": 4.993124246549293e-05, + "loss": 3.3534, + "step": 9180 + }, + { + "epoch": 0.07506266173543284, + "grad_norm": 0.7935733795166016, + "learning_rate": 4.9930766581525645e-05, + "loss": 3.3472, + "step": 9210 + }, + { + "epoch": 0.07530716551958735, + "grad_norm": 0.823512852191925, + "learning_rate": 4.993028905867463e-05, + "loss": 3.3476, + "step": 9240 + }, + { + "epoch": 0.07555166930374185, + "grad_norm": 0.8196141719818115, + "learning_rate": 4.992980989697126e-05, + "loss": 3.3479, + "step": 9270 + }, + { + "epoch": 0.07579617308789637, + "grad_norm": 0.7773461937904358, + "learning_rate": 4.992932909644705e-05, + "loss": 3.344, + "step": 9300 + }, + { + "epoch": 0.07604067687205086, + "grad_norm": 0.7703831791877747, + "learning_rate": 4.9928846657133596e-05, + "loss": 3.3495, + "step": 9330 + }, + { + "epoch": 0.07628518065620536, + "grad_norm": 0.7672068476676941, + "learning_rate": 4.992836257906262e-05, + "loss": 3.3607, + "step": 9360 + }, + { + "epoch": 0.07652968444035987, + "grad_norm": 0.7359826564788818, + "learning_rate": 4.9927876862265935e-05, + "loss": 3.3213, + "step": 9390 + }, + { + "epoch": 0.07677418822451437, + "grad_norm": 0.8856120705604553, + "learning_rate": 4.992738950677548e-05, + "loss": 3.3132, + "step": 9420 + }, + { + "epoch": 0.07701869200866888, + "grad_norm": 0.7555311918258667, + "learning_rate": 4.992690051262329e-05, + "loss": 3.3171, + "step": 9450 + }, + { + "epoch": 0.07726319579282338, + "grad_norm": 0.8101349472999573, + "learning_rate": 4.99264098798415e-05, + "loss": 3.3356, + "step": 9480 + }, + { + "epoch": 0.0775076995769779, + "grad_norm": 0.7647647857666016, + "learning_rate": 4.992591760846238e-05, + "loss": 3.318, + "step": 9510 + }, + { + "epoch": 0.0777522033611324, + "grad_norm": 0.7218281030654907, + "learning_rate": 4.9925423698518274e-05, + "loss": 3.3136, + "step": 9540 + }, + { + "epoch": 0.0779967071452869, + "grad_norm": 0.8410701155662537, + "learning_rate": 4.992492815004166e-05, + "loss": 3.3436, + "step": 9570 + }, + { + "epoch": 0.0782412109294414, + "grad_norm": 0.7226998209953308, + "learning_rate": 4.992443096306512e-05, + "loss": 3.3171, + "step": 9600 + }, + { + "epoch": 0.0784857147135959, + "grad_norm": 0.7702776193618774, + "learning_rate": 4.992393213762132e-05, + "loss": 3.3512, + "step": 9630 + }, + { + "epoch": 0.07873021849775041, + "grad_norm": 0.816752016544342, + "learning_rate": 4.992343167374307e-05, + "loss": 3.3189, + "step": 9660 + }, + { + "epoch": 0.07897472228190491, + "grad_norm": 0.854523241519928, + "learning_rate": 4.992292957146326e-05, + "loss": 3.315, + "step": 9690 + }, + { + "epoch": 0.07921922606605943, + "grad_norm": 0.7951149940490723, + "learning_rate": 4.992242583081489e-05, + "loss": 3.314, + "step": 9720 + }, + { + "epoch": 0.07946372985021392, + "grad_norm": 0.7649703621864319, + "learning_rate": 4.992192045183109e-05, + "loss": 3.2931, + "step": 9750 + }, + { + "epoch": 0.07970823363436844, + "grad_norm": 0.8076454401016235, + "learning_rate": 4.9921413434545075e-05, + "loss": 3.2975, + "step": 9780 + }, + { + "epoch": 0.07995273741852293, + "grad_norm": 0.7618278861045837, + 
"learning_rate": 4.992090477899018e-05, + "loss": 3.2909, + "step": 9810 + }, + { + "epoch": 0.08019724120267743, + "grad_norm": 0.748193085193634, + "learning_rate": 4.992039448519982e-05, + "loss": 3.2894, + "step": 9840 + }, + { + "epoch": 0.08044174498683195, + "grad_norm": 0.7488676309585571, + "learning_rate": 4.9919882553207566e-05, + "loss": 3.2905, + "step": 9870 + }, + { + "epoch": 0.08068624877098644, + "grad_norm": 0.7544564604759216, + "learning_rate": 4.9919368983047066e-05, + "loss": 3.2983, + "step": 9900 + }, + { + "epoch": 0.08093075255514096, + "grad_norm": 0.7570173144340515, + "learning_rate": 4.9918853774752074e-05, + "loss": 3.3151, + "step": 9930 + }, + { + "epoch": 0.08117525633929545, + "grad_norm": 0.7475308775901794, + "learning_rate": 4.991833692835646e-05, + "loss": 3.2878, + "step": 9960 + }, + { + "epoch": 0.08141976012344997, + "grad_norm": 0.858024001121521, + "learning_rate": 4.9917818443894203e-05, + "loss": 3.298, + "step": 9990 + }, + { + "epoch": 0.08166426390760446, + "grad_norm": 0.9019065499305725, + "learning_rate": 4.991729832139939e-05, + "loss": 3.2961, + "step": 10020 + }, + { + "epoch": 0.08190876769175896, + "grad_norm": 0.757036030292511, + "learning_rate": 4.991677656090621e-05, + "loss": 3.2939, + "step": 10050 + }, + { + "epoch": 0.08215327147591348, + "grad_norm": 0.7498080730438232, + "learning_rate": 4.991625316244896e-05, + "loss": 3.3187, + "step": 10080 + }, + { + "epoch": 0.08239777526006797, + "grad_norm": 0.7562547922134399, + "learning_rate": 4.991572812606205e-05, + "loss": 3.3113, + "step": 10110 + }, + { + "epoch": 0.08264227904422249, + "grad_norm": 0.7378952503204346, + "learning_rate": 4.991520145177998e-05, + "loss": 3.3021, + "step": 10140 + }, + { + "epoch": 0.08288678282837698, + "grad_norm": 0.7791491150856018, + "learning_rate": 4.991467313963739e-05, + "loss": 3.2856, + "step": 10170 + }, + { + "epoch": 0.0831312866125315, + "grad_norm": 0.7295765280723572, + "learning_rate": 4.991414318966901e-05, + "loss": 3.2815, + "step": 10200 + }, + { + "epoch": 0.083375790396686, + "grad_norm": 0.7607465982437134, + "learning_rate": 4.991361160190966e-05, + "loss": 3.2709, + "step": 10230 + }, + { + "epoch": 0.08362029418084051, + "grad_norm": 0.7715272903442383, + "learning_rate": 4.9913078376394304e-05, + "loss": 3.3011, + "step": 10260 + }, + { + "epoch": 0.083864797964995, + "grad_norm": 0.7366231679916382, + "learning_rate": 4.991254351315799e-05, + "loss": 3.2798, + "step": 10290 + }, + { + "epoch": 0.0841093017491495, + "grad_norm": 0.7520325183868408, + "learning_rate": 4.991200701223587e-05, + "loss": 3.2925, + "step": 10320 + }, + { + "epoch": 0.08435380553330402, + "grad_norm": 0.7658284902572632, + "learning_rate": 4.991146887366323e-05, + "loss": 3.2954, + "step": 10350 + }, + { + "epoch": 0.08459830931745851, + "grad_norm": 0.7013542056083679, + "learning_rate": 4.991092909747542e-05, + "loss": 3.2897, + "step": 10380 + }, + { + "epoch": 0.08484281310161303, + "grad_norm": 0.8035088181495667, + "learning_rate": 4.9910387683707946e-05, + "loss": 3.2876, + "step": 10410 + }, + { + "epoch": 0.08508731688576752, + "grad_norm": 0.7619040608406067, + "learning_rate": 4.9909844632396386e-05, + "loss": 3.278, + "step": 10440 + }, + { + "epoch": 0.08533182066992204, + "grad_norm": 0.7567503452301025, + "learning_rate": 4.9909299943576445e-05, + "loss": 3.2774, + "step": 10470 + }, + { + "epoch": 0.08557632445407654, + "grad_norm": 0.7545759677886963, + "learning_rate": 4.990875361728393e-05, + "loss": 3.2824, + "step": 
10500 + }, + { + "epoch": 0.08582082823823103, + "grad_norm": 0.7955304980278015, + "learning_rate": 4.990820565355475e-05, + "loss": 3.2809, + "step": 10530 + }, + { + "epoch": 0.08606533202238555, + "grad_norm": 0.8105009198188782, + "learning_rate": 4.990765605242493e-05, + "loss": 3.2745, + "step": 10560 + }, + { + "epoch": 0.08630983580654004, + "grad_norm": 0.758289098739624, + "learning_rate": 4.990710481393061e-05, + "loss": 3.2549, + "step": 10590 + }, + { + "epoch": 0.08655433959069456, + "grad_norm": 0.7958084344863892, + "learning_rate": 4.9906551938108003e-05, + "loss": 3.3004, + "step": 10620 + }, + { + "epoch": 0.08679884337484906, + "grad_norm": 0.7208577394485474, + "learning_rate": 4.990599742499347e-05, + "loss": 3.291, + "step": 10650 + }, + { + "epoch": 0.08704334715900357, + "grad_norm": 0.7224754691123962, + "learning_rate": 4.990544127462346e-05, + "loss": 3.2676, + "step": 10680 + }, + { + "epoch": 0.08728785094315807, + "grad_norm": 0.7175268530845642, + "learning_rate": 4.9904883487034537e-05, + "loss": 3.2753, + "step": 10710 + }, + { + "epoch": 0.08753235472731256, + "grad_norm": 0.75492262840271, + "learning_rate": 4.990432406226336e-05, + "loss": 3.2707, + "step": 10740 + }, + { + "epoch": 0.08777685851146708, + "grad_norm": 0.7345853447914124, + "learning_rate": 4.990376300034671e-05, + "loss": 3.2593, + "step": 10770 + }, + { + "epoch": 0.08802136229562157, + "grad_norm": 0.719987154006958, + "learning_rate": 4.990320030132147e-05, + "loss": 3.2689, + "step": 10800 + }, + { + "epoch": 0.08826586607977609, + "grad_norm": 0.7458385229110718, + "learning_rate": 4.990263596522462e-05, + "loss": 3.2822, + "step": 10830 + }, + { + "epoch": 0.08851036986393059, + "grad_norm": 0.7266592383384705, + "learning_rate": 4.9902069992093275e-05, + "loss": 3.2818, + "step": 10860 + }, + { + "epoch": 0.0887548736480851, + "grad_norm": 0.6844702363014221, + "learning_rate": 4.990150238196463e-05, + "loss": 3.2711, + "step": 10890 + }, + { + "epoch": 0.0889993774322396, + "grad_norm": 0.7185301184654236, + "learning_rate": 4.9900933134876e-05, + "loss": 3.2682, + "step": 10920 + }, + { + "epoch": 0.0892438812163941, + "grad_norm": 0.7460073232650757, + "learning_rate": 4.990036225086481e-05, + "loss": 3.2419, + "step": 10950 + }, + { + "epoch": 0.0894883850005486, + "grad_norm": 0.7382895946502686, + "learning_rate": 4.9899789729968585e-05, + "loss": 3.2612, + "step": 10980 + }, + { + "epoch": 0.0897328887847031, + "grad_norm": 0.7204514741897583, + "learning_rate": 4.989921557222496e-05, + "loss": 3.2525, + "step": 11010 + }, + { + "epoch": 0.08997739256885762, + "grad_norm": 0.7512257695198059, + "learning_rate": 4.989863977767167e-05, + "loss": 3.2656, + "step": 11040 + }, + { + "epoch": 0.09022189635301212, + "grad_norm": 0.7579988837242126, + "learning_rate": 4.989806234634659e-05, + "loss": 3.2545, + "step": 11070 + }, + { + "epoch": 0.09046640013716663, + "grad_norm": 0.7989354729652405, + "learning_rate": 4.989748327828765e-05, + "loss": 3.2588, + "step": 11100 + }, + { + "epoch": 0.09071090392132113, + "grad_norm": 0.7362430095672607, + "learning_rate": 4.989690257353294e-05, + "loss": 3.2205, + "step": 11130 + }, + { + "epoch": 0.09095540770547564, + "grad_norm": 0.6724596619606018, + "learning_rate": 4.989632023212062e-05, + "loss": 3.252, + "step": 11160 + }, + { + "epoch": 0.09119991148963014, + "grad_norm": 0.804040789604187, + "learning_rate": 4.9895736254088975e-05, + "loss": 3.2554, + "step": 11190 + }, + { + "epoch": 0.09144441527378463, + "grad_norm": 
0.7371083498001099, + "learning_rate": 4.989515063947641e-05, + "loss": 3.2551, + "step": 11220 + }, + { + "epoch": 0.09168891905793915, + "grad_norm": 0.8144158124923706, + "learning_rate": 4.9894563388321395e-05, + "loss": 3.2396, + "step": 11250 + }, + { + "epoch": 0.09193342284209365, + "grad_norm": 0.7379621863365173, + "learning_rate": 4.989397450066254e-05, + "loss": 3.2588, + "step": 11280 + }, + { + "epoch": 0.09217792662624816, + "grad_norm": 0.7908419370651245, + "learning_rate": 4.989338397653858e-05, + "loss": 3.2316, + "step": 11310 + }, + { + "epoch": 0.09242243041040266, + "grad_norm": 0.763767421245575, + "learning_rate": 4.98927918159883e-05, + "loss": 3.2585, + "step": 11340 + }, + { + "epoch": 0.09266693419455717, + "grad_norm": 0.7726889252662659, + "learning_rate": 4.989219801905066e-05, + "loss": 3.2481, + "step": 11370 + }, + { + "epoch": 0.09291143797871167, + "grad_norm": 0.7199206352233887, + "learning_rate": 4.989160258576469e-05, + "loss": 3.2468, + "step": 11400 + }, + { + "epoch": 0.09315594176286617, + "grad_norm": 0.7201483249664307, + "learning_rate": 4.98910055161695e-05, + "loss": 3.2226, + "step": 11430 + }, + { + "epoch": 0.09340044554702068, + "grad_norm": 0.724740743637085, + "learning_rate": 4.989040681030437e-05, + "loss": 3.2546, + "step": 11460 + }, + { + "epoch": 0.09364494933117518, + "grad_norm": 0.7120088338851929, + "learning_rate": 4.988980646820865e-05, + "loss": 3.2397, + "step": 11490 + }, + { + "epoch": 0.09388945311532969, + "grad_norm": 0.7296812534332275, + "learning_rate": 4.9889204489921804e-05, + "loss": 3.2201, + "step": 11520 + }, + { + "epoch": 0.09413395689948419, + "grad_norm": 0.7710677981376648, + "learning_rate": 4.9888600875483404e-05, + "loss": 3.2475, + "step": 11550 + }, + { + "epoch": 0.0943784606836387, + "grad_norm": 0.7161293029785156, + "learning_rate": 4.9887995624933137e-05, + "loss": 3.2289, + "step": 11580 + }, + { + "epoch": 0.0946229644677932, + "grad_norm": 0.6972730755805969, + "learning_rate": 4.988738873831078e-05, + "loss": 3.2247, + "step": 11610 + }, + { + "epoch": 0.0948674682519477, + "grad_norm": 0.7108049988746643, + "learning_rate": 4.988678021565623e-05, + "loss": 3.2227, + "step": 11640 + }, + { + "epoch": 0.09511197203610221, + "grad_norm": 0.729718029499054, + "learning_rate": 4.988617005700949e-05, + "loss": 3.2396, + "step": 11670 + }, + { + "epoch": 0.0953564758202567, + "grad_norm": 0.6947882771492004, + "learning_rate": 4.988555826241068e-05, + "loss": 3.238, + "step": 11700 + }, + { + "epoch": 0.09560097960441122, + "grad_norm": 0.7448226809501648, + "learning_rate": 4.988494483190001e-05, + "loss": 3.2274, + "step": 11730 + }, + { + "epoch": 0.09584548338856572, + "grad_norm": 0.6681617498397827, + "learning_rate": 4.98843297655178e-05, + "loss": 3.2504, + "step": 11760 + }, + { + "epoch": 0.09608998717272023, + "grad_norm": 0.7603419423103333, + "learning_rate": 4.988371306330449e-05, + "loss": 3.2206, + "step": 11790 + }, + { + "epoch": 0.09633449095687473, + "grad_norm": 0.7187109589576721, + "learning_rate": 4.9883094725300625e-05, + "loss": 3.2313, + "step": 11820 + }, + { + "epoch": 0.09657899474102924, + "grad_norm": 0.7375375628471375, + "learning_rate": 4.9882474751546846e-05, + "loss": 3.214, + "step": 11850 + }, + { + "epoch": 0.09682349852518374, + "grad_norm": 0.7366687059402466, + "learning_rate": 4.98818531420839e-05, + "loss": 3.2191, + "step": 11880 + }, + { + "epoch": 0.09706800230933824, + "grad_norm": 0.690647304058075, + "learning_rate": 4.988122989695267e-05, + 
"loss": 3.2378, + "step": 11910 + }, + { + "epoch": 0.09731250609349275, + "grad_norm": 0.7315085530281067, + "learning_rate": 4.9880605016194116e-05, + "loss": 3.2121, + "step": 11940 + }, + { + "epoch": 0.09755700987764725, + "grad_norm": 0.7334670424461365, + "learning_rate": 4.9879978499849304e-05, + "loss": 3.2194, + "step": 11970 + }, + { + "epoch": 0.09780151366180176, + "grad_norm": 0.7092124223709106, + "learning_rate": 4.987935034795944e-05, + "loss": 3.2162, + "step": 12000 + }, + { + "epoch": 0.09804601744595626, + "grad_norm": 0.7557071447372437, + "learning_rate": 4.9878720560565803e-05, + "loss": 3.21, + "step": 12030 + }, + { + "epoch": 0.09829052123011077, + "grad_norm": 0.7767185568809509, + "learning_rate": 4.9878089137709806e-05, + "loss": 3.2221, + "step": 12060 + }, + { + "epoch": 0.09853502501426527, + "grad_norm": 0.7004647850990295, + "learning_rate": 4.987745607943295e-05, + "loss": 3.2333, + "step": 12090 + }, + { + "epoch": 0.09877952879841977, + "grad_norm": 0.6746474504470825, + "learning_rate": 4.9876821385776854e-05, + "loss": 3.2185, + "step": 12120 + }, + { + "epoch": 0.09902403258257428, + "grad_norm": 0.7061980962753296, + "learning_rate": 4.9876185056783226e-05, + "loss": 3.2159, + "step": 12150 + }, + { + "epoch": 0.09926853636672878, + "grad_norm": 0.7409046292304993, + "learning_rate": 4.987554709249391e-05, + "loss": 3.2214, + "step": 12180 + }, + { + "epoch": 0.09951304015088329, + "grad_norm": 0.6951879858970642, + "learning_rate": 4.987490749295085e-05, + "loss": 3.2124, + "step": 12210 + }, + { + "epoch": 0.09975754393503779, + "grad_norm": 0.7459700107574463, + "learning_rate": 4.9874266258196084e-05, + "loss": 3.2074, + "step": 12240 + }, + { + "epoch": 0.1000020477191923, + "grad_norm": 0.654021680355072, + "learning_rate": 4.9873623388271764e-05, + "loss": 3.2199, + "step": 12270 + }, + { + "epoch": 0.1002465515033468, + "grad_norm": 0.7051617503166199, + "learning_rate": 4.987297888322015e-05, + "loss": 3.1998, + "step": 12300 + }, + { + "epoch": 0.1004910552875013, + "grad_norm": 0.7212511301040649, + "learning_rate": 4.9872332743083615e-05, + "loss": 3.2144, + "step": 12330 + }, + { + "epoch": 0.10073555907165581, + "grad_norm": 0.6988245844841003, + "learning_rate": 4.987168496790463e-05, + "loss": 3.2036, + "step": 12360 + }, + { + "epoch": 0.10098006285581031, + "grad_norm": 0.7479727864265442, + "learning_rate": 4.9871035557725774e-05, + "loss": 3.2035, + "step": 12390 + }, + { + "epoch": 0.10122456663996482, + "grad_norm": 0.6703733801841736, + "learning_rate": 4.987038451258975e-05, + "loss": 3.2119, + "step": 12420 + }, + { + "epoch": 0.10146907042411932, + "grad_norm": 0.7142158150672913, + "learning_rate": 4.9869731832539346e-05, + "loss": 3.1866, + "step": 12450 + }, + { + "epoch": 0.10171357420827383, + "grad_norm": 0.6590874195098877, + "learning_rate": 4.9869077517617474e-05, + "loss": 3.1854, + "step": 12480 + }, + { + "epoch": 0.10195807799242833, + "grad_norm": 0.7379269599914551, + "learning_rate": 4.986842156786714e-05, + "loss": 3.1755, + "step": 12510 + }, + { + "epoch": 0.10220258177658284, + "grad_norm": 0.7689487934112549, + "learning_rate": 4.9867763983331474e-05, + "loss": 3.1838, + "step": 12540 + }, + { + "epoch": 0.10244708556073734, + "grad_norm": 0.6958329677581787, + "learning_rate": 4.9867104764053697e-05, + "loss": 3.191, + "step": 12570 + }, + { + "epoch": 0.10269158934489184, + "grad_norm": 0.7052045464515686, + "learning_rate": 4.986644391007714e-05, + "loss": 3.1773, + "step": 12600 + }, + { + "epoch": 
0.10293609312904635, + "grad_norm": 0.6961473226547241, + "learning_rate": 4.986578142144525e-05, + "loss": 3.1982, + "step": 12630 + }, + { + "epoch": 0.10318059691320085, + "grad_norm": 0.6776019930839539, + "learning_rate": 4.986511729820158e-05, + "loss": 3.1932, + "step": 12660 + }, + { + "epoch": 0.10342510069735536, + "grad_norm": 0.7005933523178101, + "learning_rate": 4.986445154038979e-05, + "loss": 3.1892, + "step": 12690 + }, + { + "epoch": 0.10366960448150986, + "grad_norm": 0.6863512992858887, + "learning_rate": 4.986378414805364e-05, + "loss": 3.199, + "step": 12720 + }, + { + "epoch": 0.10391410826566437, + "grad_norm": 0.7182413339614868, + "learning_rate": 4.9863115121237006e-05, + "loss": 3.178, + "step": 12750 + }, + { + "epoch": 0.10415861204981887, + "grad_norm": 0.7226789593696594, + "learning_rate": 4.986244445998386e-05, + "loss": 3.197, + "step": 12780 + }, + { + "epoch": 0.10440311583397337, + "grad_norm": 0.7314379215240479, + "learning_rate": 4.9861772164338304e-05, + "loss": 3.184, + "step": 12810 + }, + { + "epoch": 0.10464761961812788, + "grad_norm": 0.7286015152931213, + "learning_rate": 4.986109823434452e-05, + "loss": 3.1846, + "step": 12840 + }, + { + "epoch": 0.10489212340228238, + "grad_norm": 0.6875571608543396, + "learning_rate": 4.986042267004681e-05, + "loss": 3.1965, + "step": 12870 + }, + { + "epoch": 0.10513662718643689, + "grad_norm": 0.7378973364830017, + "learning_rate": 4.98597454714896e-05, + "loss": 3.1946, + "step": 12900 + }, + { + "epoch": 0.10538113097059139, + "grad_norm": 0.7709534764289856, + "learning_rate": 4.985906663871739e-05, + "loss": 3.195, + "step": 12930 + }, + { + "epoch": 0.1056256347547459, + "grad_norm": 0.6385577917098999, + "learning_rate": 4.9858386171774804e-05, + "loss": 3.1958, + "step": 12960 + }, + { + "epoch": 0.1058701385389004, + "grad_norm": 0.7177209258079529, + "learning_rate": 4.985770407070659e-05, + "loss": 3.1669, + "step": 12990 + }, + { + "epoch": 0.1061146423230549, + "grad_norm": 0.6712928414344788, + "learning_rate": 4.985702033555757e-05, + "loss": 3.193, + "step": 13020 + }, + { + "epoch": 0.10635914610720941, + "grad_norm": 0.670972466468811, + "learning_rate": 4.9856334966372705e-05, + "loss": 3.1788, + "step": 13050 + }, + { + "epoch": 0.10660364989136391, + "grad_norm": 0.7206739187240601, + "learning_rate": 4.985564796319705e-05, + "loss": 3.1928, + "step": 13080 + }, + { + "epoch": 0.10684815367551842, + "grad_norm": 0.7591316103935242, + "learning_rate": 4.985495932607576e-05, + "loss": 3.196, + "step": 13110 + }, + { + "epoch": 0.10709265745967292, + "grad_norm": 0.6864051222801208, + "learning_rate": 4.9854269055054096e-05, + "loss": 3.1929, + "step": 13140 + }, + { + "epoch": 0.10733716124382743, + "grad_norm": 0.6852788329124451, + "learning_rate": 4.985357715017744e-05, + "loss": 3.1824, + "step": 13170 + }, + { + "epoch": 0.10758166502798193, + "grad_norm": 0.6889312863349915, + "learning_rate": 4.985288361149129e-05, + "loss": 3.1918, + "step": 13200 + }, + { + "epoch": 0.10782616881213643, + "grad_norm": 0.724328875541687, + "learning_rate": 4.985218843904122e-05, + "loss": 3.1944, + "step": 13230 + }, + { + "epoch": 0.10807067259629094, + "grad_norm": 0.7326011061668396, + "learning_rate": 4.985149163287294e-05, + "loss": 3.1815, + "step": 13260 + }, + { + "epoch": 0.10831517638044544, + "grad_norm": 0.7652591466903687, + "learning_rate": 4.985079319303225e-05, + "loss": 3.1747, + "step": 13290 + }, + { + "epoch": 0.10855968016459995, + "grad_norm": 0.7179089188575745, + 
"learning_rate": 4.985009311956507e-05, + "loss": 3.1729, + "step": 13320 + }, + { + "epoch": 0.10880418394875445, + "grad_norm": 0.6816122531890869, + "learning_rate": 4.984939141251741e-05, + "loss": 3.1877, + "step": 13350 + }, + { + "epoch": 0.10904868773290896, + "grad_norm": 0.660858690738678, + "learning_rate": 4.9848688071935415e-05, + "loss": 3.1658, + "step": 13380 + }, + { + "epoch": 0.10929319151706346, + "grad_norm": 0.6714891195297241, + "learning_rate": 4.98479830978653e-05, + "loss": 3.1753, + "step": 13410 + }, + { + "epoch": 0.10953769530121797, + "grad_norm": 0.6714405417442322, + "learning_rate": 4.9847276490353425e-05, + "loss": 3.167, + "step": 13440 + }, + { + "epoch": 0.10978219908537247, + "grad_norm": 0.669769823551178, + "learning_rate": 4.984656824944623e-05, + "loss": 3.1433, + "step": 13470 + }, + { + "epoch": 0.11002670286952697, + "grad_norm": 0.7423032522201538, + "learning_rate": 4.984585837519028e-05, + "loss": 3.1587, + "step": 13500 + }, + { + "epoch": 0.11027120665368148, + "grad_norm": 0.6712002754211426, + "learning_rate": 4.984514686763224e-05, + "loss": 3.1472, + "step": 13530 + }, + { + "epoch": 0.11051571043783598, + "grad_norm": 0.7179352045059204, + "learning_rate": 4.984443372681887e-05, + "loss": 3.1537, + "step": 13560 + }, + { + "epoch": 0.11076021422199049, + "grad_norm": 0.7070680260658264, + "learning_rate": 4.9843718952797074e-05, + "loss": 3.1565, + "step": 13590 + }, + { + "epoch": 0.11100471800614499, + "grad_norm": 0.7316999435424805, + "learning_rate": 4.984300254561382e-05, + "loss": 3.1408, + "step": 13620 + }, + { + "epoch": 0.1112492217902995, + "grad_norm": 0.7282266616821289, + "learning_rate": 4.9842284505316206e-05, + "loss": 3.1646, + "step": 13650 + }, + { + "epoch": 0.111493725574454, + "grad_norm": 0.6478064060211182, + "learning_rate": 4.984156483195144e-05, + "loss": 3.134, + "step": 13680 + }, + { + "epoch": 0.1117382293586085, + "grad_norm": 0.6633407473564148, + "learning_rate": 4.984084352556683e-05, + "loss": 3.1501, + "step": 13710 + }, + { + "epoch": 0.11198273314276301, + "grad_norm": 0.6864104866981506, + "learning_rate": 4.984012058620979e-05, + "loss": 3.154, + "step": 13740 + }, + { + "epoch": 0.11222723692691751, + "grad_norm": 0.7092240452766418, + "learning_rate": 4.983939601392784e-05, + "loss": 3.1625, + "step": 13770 + }, + { + "epoch": 0.11247174071107202, + "grad_norm": 0.6620602607727051, + "learning_rate": 4.9838669808768616e-05, + "loss": 3.1681, + "step": 13800 + }, + { + "epoch": 0.11271624449522652, + "grad_norm": 0.6808717250823975, + "learning_rate": 4.9837941970779865e-05, + "loss": 3.1612, + "step": 13830 + }, + { + "epoch": 0.11296074827938103, + "grad_norm": 0.7544457316398621, + "learning_rate": 4.983721250000942e-05, + "loss": 3.1518, + "step": 13860 + }, + { + "epoch": 0.11320525206353553, + "grad_norm": 0.6600370407104492, + "learning_rate": 4.983648139650524e-05, + "loss": 3.1432, + "step": 13890 + }, + { + "epoch": 0.11344975584769003, + "grad_norm": 0.6779924035072327, + "learning_rate": 4.983574866031538e-05, + "loss": 3.1451, + "step": 13920 + }, + { + "epoch": 0.11369425963184454, + "grad_norm": 0.7038785815238953, + "learning_rate": 4.983501429148802e-05, + "loss": 3.1389, + "step": 13950 + }, + { + "epoch": 0.11393876341599904, + "grad_norm": 0.7131289839744568, + "learning_rate": 4.983427829007144e-05, + "loss": 3.1389, + "step": 13980 + }, + { + "epoch": 0.11418326720015355, + "grad_norm": 0.6724546551704407, + "learning_rate": 4.9833540656114004e-05, + "loss": 3.1516, + 
"step": 14010 + }, + { + "epoch": 0.11442777098430805, + "grad_norm": 0.6874014139175415, + "learning_rate": 4.983280138966421e-05, + "loss": 3.1616, + "step": 14040 + }, + { + "epoch": 0.11467227476846256, + "grad_norm": 0.722695529460907, + "learning_rate": 4.983206049077065e-05, + "loss": 3.1323, + "step": 14070 + }, + { + "epoch": 0.11491677855261706, + "grad_norm": 0.7108673453330994, + "learning_rate": 4.9831317959482044e-05, + "loss": 3.1676, + "step": 14100 + }, + { + "epoch": 0.11516128233677157, + "grad_norm": 0.673359215259552, + "learning_rate": 4.983057379584719e-05, + "loss": 3.1468, + "step": 14130 + }, + { + "epoch": 0.11540578612092607, + "grad_norm": 0.7127307653427124, + "learning_rate": 4.982982799991501e-05, + "loss": 3.1485, + "step": 14160 + }, + { + "epoch": 0.11565028990508057, + "grad_norm": 0.698799729347229, + "learning_rate": 4.982908057173454e-05, + "loss": 3.1342, + "step": 14190 + }, + { + "epoch": 0.11589479368923508, + "grad_norm": 0.6260579228401184, + "learning_rate": 4.98283315113549e-05, + "loss": 3.1381, + "step": 14220 + }, + { + "epoch": 0.11613929747338958, + "grad_norm": 0.6754068732261658, + "learning_rate": 4.982758081882534e-05, + "loss": 3.1409, + "step": 14250 + }, + { + "epoch": 0.11638380125754409, + "grad_norm": 0.6846606135368347, + "learning_rate": 4.982682849419521e-05, + "loss": 3.1422, + "step": 14280 + }, + { + "epoch": 0.11662830504169859, + "grad_norm": 0.6726362109184265, + "learning_rate": 4.982607453751396e-05, + "loss": 3.1339, + "step": 14310 + }, + { + "epoch": 0.1168728088258531, + "grad_norm": 0.6798800230026245, + "learning_rate": 4.982531894883116e-05, + "loss": 3.1571, + "step": 14340 + }, + { + "epoch": 0.1171173126100076, + "grad_norm": 0.7045117616653442, + "learning_rate": 4.9824561728196465e-05, + "loss": 3.1529, + "step": 14370 + }, + { + "epoch": 0.1173618163941621, + "grad_norm": 0.7069810032844543, + "learning_rate": 4.9823802875659674e-05, + "loss": 3.1498, + "step": 14400 + }, + { + "epoch": 0.11760632017831661, + "grad_norm": 0.6553217768669128, + "learning_rate": 4.9823042391270657e-05, + "loss": 3.1188, + "step": 14430 + }, + { + "epoch": 0.11785082396247111, + "grad_norm": 0.6937916874885559, + "learning_rate": 4.9822280275079416e-05, + "loss": 3.1363, + "step": 14460 + }, + { + "epoch": 0.11809532774662562, + "grad_norm": 0.6913928389549255, + "learning_rate": 4.9821516527136034e-05, + "loss": 3.1434, + "step": 14490 + }, + { + "epoch": 0.11833983153078012, + "grad_norm": 0.6402164101600647, + "learning_rate": 4.982075114749074e-05, + "loss": 3.1379, + "step": 14520 + }, + { + "epoch": 0.11858433531493463, + "grad_norm": 0.7302058935165405, + "learning_rate": 4.9819984136193835e-05, + "loss": 3.1501, + "step": 14550 + }, + { + "epoch": 0.11882883909908913, + "grad_norm": 0.6861427426338196, + "learning_rate": 4.9819215493295746e-05, + "loss": 3.1443, + "step": 14580 + }, + { + "epoch": 0.11907334288324363, + "grad_norm": 0.6565712094306946, + "learning_rate": 4.9818445218846995e-05, + "loss": 3.0966, + "step": 14610 + }, + { + "epoch": 0.11931784666739814, + "grad_norm": 0.6484554409980774, + "learning_rate": 4.9817673312898215e-05, + "loss": 3.1578, + "step": 14640 + }, + { + "epoch": 0.11956235045155264, + "grad_norm": 0.6579607129096985, + "learning_rate": 4.981689977550017e-05, + "loss": 3.1177, + "step": 14670 + }, + { + "epoch": 0.11980685423570715, + "grad_norm": 0.6820259690284729, + "learning_rate": 4.9816124606703684e-05, + "loss": 3.1332, + "step": 14700 + }, + { + "epoch": 0.12005135801986165, 
+ "grad_norm": 0.6516405940055847, + "learning_rate": 4.9815347806559734e-05, + "loss": 3.1205, + "step": 14730 + }, + { + "epoch": 0.12029586180401616, + "grad_norm": 0.681978166103363, + "learning_rate": 4.981456937511937e-05, + "loss": 3.1184, + "step": 14760 + }, + { + "epoch": 0.12054036558817066, + "grad_norm": 0.6539319753646851, + "learning_rate": 4.981378931243377e-05, + "loss": 3.1198, + "step": 14790 + }, + { + "epoch": 0.12078486937232517, + "grad_norm": 0.6952561140060425, + "learning_rate": 4.981300761855422e-05, + "loss": 3.1392, + "step": 14820 + }, + { + "epoch": 0.12102937315647967, + "grad_norm": 0.6566502451896667, + "learning_rate": 4.981222429353211e-05, + "loss": 3.141, + "step": 14850 + }, + { + "epoch": 0.12127387694063417, + "grad_norm": 0.7198945879936218, + "learning_rate": 4.981143933741891e-05, + "loss": 3.1337, + "step": 14880 + }, + { + "epoch": 0.12151838072478868, + "grad_norm": 0.6870773434638977, + "learning_rate": 4.981065275026624e-05, + "loss": 3.1529, + "step": 14910 + }, + { + "epoch": 0.12176288450894318, + "grad_norm": 0.6778687238693237, + "learning_rate": 4.9809864532125797e-05, + "loss": 3.1149, + "step": 14940 + }, + { + "epoch": 0.1220073882930977, + "grad_norm": 0.6705347895622253, + "learning_rate": 4.9809074683049415e-05, + "loss": 3.1061, + "step": 14970 + }, + { + "epoch": 0.12225189207725219, + "grad_norm": 0.662770688533783, + "learning_rate": 4.9808283203089e-05, + "loss": 3.1039, + "step": 15000 + }, + { + "epoch": 0.1224963958614067, + "grad_norm": 0.680558443069458, + "learning_rate": 4.9807490092296586e-05, + "loss": 3.1306, + "step": 15030 + }, + { + "epoch": 0.1227408996455612, + "grad_norm": 0.692523717880249, + "learning_rate": 4.980669535072431e-05, + "loss": 3.1291, + "step": 15060 + }, + { + "epoch": 0.1229854034297157, + "grad_norm": 0.6542308926582336, + "learning_rate": 4.980589897842441e-05, + "loss": 3.1058, + "step": 15090 + }, + { + "epoch": 0.12322990721387021, + "grad_norm": 0.6776884198188782, + "learning_rate": 4.9805100975449256e-05, + "loss": 3.0983, + "step": 15120 + }, + { + "epoch": 0.12347441099802471, + "grad_norm": 0.6549819111824036, + "learning_rate": 4.98043013418513e-05, + "loss": 3.1171, + "step": 15150 + }, + { + "epoch": 0.12371891478217922, + "grad_norm": 0.6724957823753357, + "learning_rate": 4.980350007768309e-05, + "loss": 3.107, + "step": 15180 + }, + { + "epoch": 0.12396341856633372, + "grad_norm": 0.7151206731796265, + "learning_rate": 4.9802697182997316e-05, + "loss": 3.1308, + "step": 15210 + }, + { + "epoch": 0.12420792235048823, + "grad_norm": 0.6585493683815002, + "learning_rate": 4.980189265784676e-05, + "loss": 3.0973, + "step": 15240 + }, + { + "epoch": 0.12445242613464273, + "grad_norm": 0.6829107403755188, + "learning_rate": 4.98010865022843e-05, + "loss": 3.117, + "step": 15270 + }, + { + "epoch": 0.12469692991879723, + "grad_norm": 0.6554434895515442, + "learning_rate": 4.980027871636293e-05, + "loss": 3.1089, + "step": 15300 + }, + { + "epoch": 0.12494143370295174, + "grad_norm": 0.6667256355285645, + "learning_rate": 4.979946930013576e-05, + "loss": 3.0926, + "step": 15330 + }, + { + "epoch": 0.12518593748710624, + "grad_norm": 0.6656805872917175, + "learning_rate": 4.9798658253655996e-05, + "loss": 3.103, + "step": 15360 + }, + { + "epoch": 0.12543044127126074, + "grad_norm": 0.7125841379165649, + "learning_rate": 4.9797845576976945e-05, + "loss": 3.1153, + "step": 15390 + }, + { + "epoch": 0.12567494505541527, + "grad_norm": 0.6778884530067444, + "learning_rate": 
4.979703127015205e-05, + "loss": 3.1402, + "step": 15420 + }, + { + "epoch": 0.12591944883956976, + "grad_norm": 0.6417499780654907, + "learning_rate": 4.979621533323482e-05, + "loss": 3.1006, + "step": 15450 + }, + { + "epoch": 0.12616395262372426, + "grad_norm": 0.5959713459014893, + "learning_rate": 4.97953977662789e-05, + "loss": 3.0978, + "step": 15480 + }, + { + "epoch": 0.12640845640787876, + "grad_norm": 0.6567413806915283, + "learning_rate": 4.9794578569338046e-05, + "loss": 3.1093, + "step": 15510 + }, + { + "epoch": 0.12665296019203326, + "grad_norm": 0.630220353603363, + "learning_rate": 4.979375774246609e-05, + "loss": 3.106, + "step": 15540 + }, + { + "epoch": 0.12689746397618779, + "grad_norm": 0.6512444615364075, + "learning_rate": 4.979293528571701e-05, + "loss": 3.0794, + "step": 15570 + }, + { + "epoch": 0.12714196776034228, + "grad_norm": 0.69533771276474, + "learning_rate": 4.979211119914486e-05, + "loss": 3.0998, + "step": 15600 + }, + { + "epoch": 0.12738647154449678, + "grad_norm": 0.6545675992965698, + "learning_rate": 4.9791285482803824e-05, + "loss": 3.0992, + "step": 15630 + }, + { + "epoch": 0.12763097532865128, + "grad_norm": 0.6943343877792358, + "learning_rate": 4.979045813674817e-05, + "loss": 3.0894, + "step": 15660 + }, + { + "epoch": 0.1278754791128058, + "grad_norm": 0.6437342166900635, + "learning_rate": 4.9789629161032294e-05, + "loss": 3.093, + "step": 15690 + }, + { + "epoch": 0.1281199828969603, + "grad_norm": 0.6412293910980225, + "learning_rate": 4.978879855571068e-05, + "loss": 3.1068, + "step": 15720 + }, + { + "epoch": 0.1283644866811148, + "grad_norm": 0.6677573323249817, + "learning_rate": 4.9787966320837946e-05, + "loss": 3.122, + "step": 15750 + }, + { + "epoch": 0.1286089904652693, + "grad_norm": 0.6717284321784973, + "learning_rate": 4.97871324564688e-05, + "loss": 3.1016, + "step": 15780 + }, + { + "epoch": 0.1288534942494238, + "grad_norm": 0.674525260925293, + "learning_rate": 4.978629696265804e-05, + "loss": 3.0843, + "step": 15810 + }, + { + "epoch": 0.12909799803357833, + "grad_norm": 0.6576509475708008, + "learning_rate": 4.9785459839460595e-05, + "loss": 3.0866, + "step": 15840 + }, + { + "epoch": 0.12934250181773282, + "grad_norm": 0.6330334544181824, + "learning_rate": 4.97846210869315e-05, + "loss": 3.0852, + "step": 15870 + }, + { + "epoch": 0.12958700560188732, + "grad_norm": 0.6420540809631348, + "learning_rate": 4.978378070512591e-05, + "loss": 3.0794, + "step": 15900 + }, + { + "epoch": 0.12983150938604182, + "grad_norm": 0.6615996956825256, + "learning_rate": 4.978293869409903e-05, + "loss": 3.0974, + "step": 15930 + }, + { + "epoch": 0.13007601317019635, + "grad_norm": 0.6461077332496643, + "learning_rate": 4.978209505390625e-05, + "loss": 3.1028, + "step": 15960 + }, + { + "epoch": 0.13032051695435085, + "grad_norm": 0.761465311050415, + "learning_rate": 4.978124978460301e-05, + "loss": 3.1031, + "step": 15990 + }, + { + "epoch": 0.13056502073850534, + "grad_norm": 0.7302458882331848, + "learning_rate": 4.978040288624488e-05, + "loss": 3.0962, + "step": 16020 + }, + { + "epoch": 0.13080952452265984, + "grad_norm": 0.6438547372817993, + "learning_rate": 4.977955435888753e-05, + "loss": 3.0893, + "step": 16050 + }, + { + "epoch": 0.13105402830681434, + "grad_norm": 0.6334322690963745, + "learning_rate": 4.9778704202586736e-05, + "loss": 3.088, + "step": 16080 + }, + { + "epoch": 0.13129853209096887, + "grad_norm": 0.6831603646278381, + "learning_rate": 4.9777852417398394e-05, + "loss": 3.0889, + "step": 16110 + }, + { + 
"epoch": 0.13154303587512337, + "grad_norm": 0.6886951327323914, + "learning_rate": 4.977699900337849e-05, + "loss": 3.1016, + "step": 16140 + }, + { + "epoch": 0.13178753965927786, + "grad_norm": 0.6405287384986877, + "learning_rate": 4.977614396058313e-05, + "loss": 3.0914, + "step": 16170 + }, + { + "epoch": 0.13203204344343236, + "grad_norm": 0.671773374080658, + "learning_rate": 4.977528728906853e-05, + "loss": 3.0843, + "step": 16200 + }, + { + "epoch": 0.13227654722758686, + "grad_norm": 0.6488827466964722, + "learning_rate": 4.977442898889099e-05, + "loss": 3.0759, + "step": 16230 + }, + { + "epoch": 0.1325210510117414, + "grad_norm": 0.6638045310974121, + "learning_rate": 4.977356906010694e-05, + "loss": 3.0921, + "step": 16260 + }, + { + "epoch": 0.13276555479589588, + "grad_norm": 0.6270018219947815, + "learning_rate": 4.977270750277291e-05, + "loss": 3.0881, + "step": 16290 + }, + { + "epoch": 0.13301005858005038, + "grad_norm": 0.6884603500366211, + "learning_rate": 4.9771844316945536e-05, + "loss": 3.076, + "step": 16320 + }, + { + "epoch": 0.13325456236420488, + "grad_norm": 0.6531707644462585, + "learning_rate": 4.9770979502681566e-05, + "loss": 3.1, + "step": 16350 + }, + { + "epoch": 0.1334990661483594, + "grad_norm": 0.6543397903442383, + "learning_rate": 4.977011306003784e-05, + "loss": 3.0849, + "step": 16380 + }, + { + "epoch": 0.1337435699325139, + "grad_norm": 0.6806967258453369, + "learning_rate": 4.976924498907133e-05, + "loss": 3.0794, + "step": 16410 + }, + { + "epoch": 0.1339880737166684, + "grad_norm": 0.6400375962257385, + "learning_rate": 4.9768375289839083e-05, + "loss": 3.0783, + "step": 16440 + }, + { + "epoch": 0.1342325775008229, + "grad_norm": 0.6495615243911743, + "learning_rate": 4.976750396239828e-05, + "loss": 3.0674, + "step": 16470 + }, + { + "epoch": 0.1344770812849774, + "grad_norm": 0.6477648019790649, + "learning_rate": 4.9766631006806205e-05, + "loss": 3.0868, + "step": 16500 + }, + { + "epoch": 0.13472158506913193, + "grad_norm": 0.6622707843780518, + "learning_rate": 4.9765756423120235e-05, + "loss": 3.086, + "step": 16530 + }, + { + "epoch": 0.13496608885328643, + "grad_norm": 0.6572865843772888, + "learning_rate": 4.976488021139787e-05, + "loss": 3.0662, + "step": 16560 + }, + { + "epoch": 0.13521059263744092, + "grad_norm": 0.6188306212425232, + "learning_rate": 4.9764002371696704e-05, + "loss": 3.0723, + "step": 16590 + }, + { + "epoch": 0.13545509642159542, + "grad_norm": 0.6763888597488403, + "learning_rate": 4.976312290407445e-05, + "loss": 3.0863, + "step": 16620 + }, + { + "epoch": 0.13569960020574992, + "grad_norm": 0.6841058135032654, + "learning_rate": 4.9762241808588915e-05, + "loss": 3.0839, + "step": 16650 + }, + { + "epoch": 0.13594410398990445, + "grad_norm": 0.6681898832321167, + "learning_rate": 4.976135908529802e-05, + "loss": 3.0775, + "step": 16680 + }, + { + "epoch": 0.13618860777405895, + "grad_norm": 0.6338205337524414, + "learning_rate": 4.97604747342598e-05, + "loss": 3.0795, + "step": 16710 + }, + { + "epoch": 0.13643311155821344, + "grad_norm": 0.6305208206176758, + "learning_rate": 4.9759588755532394e-05, + "loss": 3.0809, + "step": 16740 + }, + { + "epoch": 0.13667761534236794, + "grad_norm": 0.6307396292686462, + "learning_rate": 4.9758701149174026e-05, + "loss": 3.0635, + "step": 16770 + }, + { + "epoch": 0.13692211912652247, + "grad_norm": 0.6475804448127747, + "learning_rate": 4.975781191524306e-05, + "loss": 3.0886, + "step": 16800 + }, + { + "epoch": 0.13716662291067697, + "grad_norm": 
0.6961448788642883, + "learning_rate": 4.975692105379794e-05, + "loss": 3.0827, + "step": 16830 + }, + { + "epoch": 0.13741112669483146, + "grad_norm": 0.6734959483146667, + "learning_rate": 4.9756028564897236e-05, + "loss": 3.0541, + "step": 16860 + }, + { + "epoch": 0.13765563047898596, + "grad_norm": 0.6229287981987, + "learning_rate": 4.975513444859963e-05, + "loss": 3.0651, + "step": 16890 + }, + { + "epoch": 0.13790013426314046, + "grad_norm": 0.6343866586685181, + "learning_rate": 4.9754238704963875e-05, + "loss": 3.0831, + "step": 16920 + }, + { + "epoch": 0.138144638047295, + "grad_norm": 0.6195032596588135, + "learning_rate": 4.975334133404887e-05, + "loss": 3.0846, + "step": 16950 + }, + { + "epoch": 0.13838914183144949, + "grad_norm": 0.6339040398597717, + "learning_rate": 4.97524423359136e-05, + "loss": 3.0565, + "step": 16980 + }, + { + "epoch": 0.13863364561560398, + "grad_norm": 0.6364467144012451, + "learning_rate": 4.9751541710617165e-05, + "loss": 3.0752, + "step": 17010 + }, + { + "epoch": 0.13887814939975848, + "grad_norm": 0.715610921382904, + "learning_rate": 4.9750639458218774e-05, + "loss": 3.0903, + "step": 17040 + }, + { + "epoch": 0.139122653183913, + "grad_norm": 0.6401960253715515, + "learning_rate": 4.974973557877773e-05, + "loss": 3.0751, + "step": 17070 + }, + { + "epoch": 0.1393671569680675, + "grad_norm": 0.660082995891571, + "learning_rate": 4.9748830072353455e-05, + "loss": 3.0684, + "step": 17100 + }, + { + "epoch": 0.139611660752222, + "grad_norm": 0.6341212391853333, + "learning_rate": 4.9747922939005484e-05, + "loss": 3.0755, + "step": 17130 + }, + { + "epoch": 0.1398561645363765, + "grad_norm": 0.7254164814949036, + "learning_rate": 4.974701417879344e-05, + "loss": 3.0668, + "step": 17160 + }, + { + "epoch": 0.140100668320531, + "grad_norm": 0.649255633354187, + "learning_rate": 4.974610379177706e-05, + "loss": 3.0527, + "step": 17190 + }, + { + "epoch": 0.14034517210468553, + "grad_norm": 0.6330759525299072, + "learning_rate": 4.9745191778016196e-05, + "loss": 3.0587, + "step": 17220 + }, + { + "epoch": 0.14058967588884003, + "grad_norm": 0.6298040747642517, + "learning_rate": 4.97442781375708e-05, + "loss": 3.0576, + "step": 17250 + }, + { + "epoch": 0.14083417967299453, + "grad_norm": 0.6595584750175476, + "learning_rate": 4.974336287050093e-05, + "loss": 3.0518, + "step": 17280 + }, + { + "epoch": 0.14107868345714902, + "grad_norm": 0.634013831615448, + "learning_rate": 4.974244597686676e-05, + "loss": 3.0628, + "step": 17310 + }, + { + "epoch": 0.14132318724130352, + "grad_norm": 0.6461661458015442, + "learning_rate": 4.974152745672855e-05, + "loss": 3.0455, + "step": 17340 + }, + { + "epoch": 0.14156769102545805, + "grad_norm": 0.635123074054718, + "learning_rate": 4.9740607310146706e-05, + "loss": 3.0677, + "step": 17370 + }, + { + "epoch": 0.14181219480961255, + "grad_norm": 0.6322553753852844, + "learning_rate": 4.973968553718169e-05, + "loss": 3.0534, + "step": 17400 + }, + { + "epoch": 0.14205669859376704, + "grad_norm": 0.6354007720947266, + "learning_rate": 4.9738762137894106e-05, + "loss": 3.0785, + "step": 17430 + }, + { + "epoch": 0.14230120237792154, + "grad_norm": 0.6522131562232971, + "learning_rate": 4.973783711234467e-05, + "loss": 3.0831, + "step": 17460 + }, + { + "epoch": 0.14254570616207607, + "grad_norm": 0.691369354724884, + "learning_rate": 4.9736910460594164e-05, + "loss": 3.0671, + "step": 17490 + }, + { + "epoch": 0.14279020994623057, + "grad_norm": 0.6239174604415894, + "learning_rate": 4.973598218270352e-05, + 
"loss": 3.0541, + "step": 17520 + }, + { + "epoch": 0.14303471373038507, + "grad_norm": 0.649414598941803, + "learning_rate": 4.973505227873377e-05, + "loss": 3.0581, + "step": 17550 + }, + { + "epoch": 0.14327921751453956, + "grad_norm": 0.6302400827407837, + "learning_rate": 4.9734120748746026e-05, + "loss": 3.056, + "step": 17580 + }, + { + "epoch": 0.14352372129869406, + "grad_norm": 0.6369176506996155, + "learning_rate": 4.973318759280153e-05, + "loss": 3.0468, + "step": 17610 + }, + { + "epoch": 0.1437682250828486, + "grad_norm": 0.6598733067512512, + "learning_rate": 4.973225281096162e-05, + "loss": 3.055, + "step": 17640 + }, + { + "epoch": 0.1440127288670031, + "grad_norm": 0.6348416805267334, + "learning_rate": 4.973131640328776e-05, + "loss": 3.0594, + "step": 17670 + }, + { + "epoch": 0.14425723265115759, + "grad_norm": 0.646902322769165, + "learning_rate": 4.97303783698415e-05, + "loss": 3.042, + "step": 17700 + }, + { + "epoch": 0.14450173643531208, + "grad_norm": 0.6352052688598633, + "learning_rate": 4.97294387106845e-05, + "loss": 3.0574, + "step": 17730 + }, + { + "epoch": 0.1447462402194666, + "grad_norm": 0.6383568048477173, + "learning_rate": 4.972849742587853e-05, + "loss": 3.0635, + "step": 17760 + }, + { + "epoch": 0.1449907440036211, + "grad_norm": 0.6384525895118713, + "learning_rate": 4.972755451548548e-05, + "loss": 3.0595, + "step": 17790 + }, + { + "epoch": 0.1452352477877756, + "grad_norm": 0.635008692741394, + "learning_rate": 4.9726609979567316e-05, + "loss": 3.0414, + "step": 17820 + }, + { + "epoch": 0.1454797515719301, + "grad_norm": 0.6321554183959961, + "learning_rate": 4.972566381818614e-05, + "loss": 3.0696, + "step": 17850 + }, + { + "epoch": 0.1457242553560846, + "grad_norm": 0.6157737970352173, + "learning_rate": 4.972471603140415e-05, + "loss": 3.0504, + "step": 17880 + }, + { + "epoch": 0.14596875914023913, + "grad_norm": 0.6098494529724121, + "learning_rate": 4.972376661928365e-05, + "loss": 3.0512, + "step": 17910 + }, + { + "epoch": 0.14621326292439363, + "grad_norm": 0.6314985156059265, + "learning_rate": 4.972281558188706e-05, + "loss": 3.0349, + "step": 17940 + }, + { + "epoch": 0.14645776670854813, + "grad_norm": 0.628455638885498, + "learning_rate": 4.972186291927689e-05, + "loss": 3.0559, + "step": 17970 + }, + { + "epoch": 0.14670227049270262, + "grad_norm": 0.6198962926864624, + "learning_rate": 4.9720908631515757e-05, + "loss": 3.0521, + "step": 18000 + }, + { + "epoch": 0.14694677427685712, + "grad_norm": 0.6339931488037109, + "learning_rate": 4.9719952718666406e-05, + "loss": 3.0446, + "step": 18030 + }, + { + "epoch": 0.14719127806101165, + "grad_norm": 0.5936692357063293, + "learning_rate": 4.971899518079167e-05, + "loss": 3.0442, + "step": 18060 + }, + { + "epoch": 0.14743578184516615, + "grad_norm": 0.6095844507217407, + "learning_rate": 4.971803601795451e-05, + "loss": 3.0377, + "step": 18090 + }, + { + "epoch": 0.14768028562932065, + "grad_norm": 0.6403419375419617, + "learning_rate": 4.9717075230217955e-05, + "loss": 3.0631, + "step": 18120 + }, + { + "epoch": 0.14792478941347514, + "grad_norm": 0.6064668893814087, + "learning_rate": 4.9716112817645176e-05, + "loss": 3.0596, + "step": 18150 + }, + { + "epoch": 0.14816929319762967, + "grad_norm": 0.6483209729194641, + "learning_rate": 4.971514878029945e-05, + "loss": 3.0478, + "step": 18180 + }, + { + "epoch": 0.14841379698178417, + "grad_norm": 0.6281334757804871, + "learning_rate": 4.971418311824413e-05, + "loss": 3.0314, + "step": 18210 + }, + { + "epoch": 
0.14865830076593867, + "grad_norm": 0.6215760707855225, + "learning_rate": 4.971321583154271e-05, + "loss": 3.048, + "step": 18240 + }, + { + "epoch": 0.14890280455009317, + "grad_norm": 0.6094553470611572, + "learning_rate": 4.9712246920258784e-05, + "loss": 3.0391, + "step": 18270 + }, + { + "epoch": 0.14914730833424766, + "grad_norm": 0.6209439635276794, + "learning_rate": 4.9711276384456024e-05, + "loss": 3.0353, + "step": 18300 + }, + { + "epoch": 0.1493918121184022, + "grad_norm": 0.6350612044334412, + "learning_rate": 4.9710304224198245e-05, + "loss": 3.0395, + "step": 18330 + }, + { + "epoch": 0.1496363159025567, + "grad_norm": 0.6057417392730713, + "learning_rate": 4.970933043954935e-05, + "loss": 3.0448, + "step": 18360 + }, + { + "epoch": 0.1498808196867112, + "grad_norm": 0.6194915175437927, + "learning_rate": 4.970835503057335e-05, + "loss": 3.0291, + "step": 18390 + }, + { + "epoch": 0.15012532347086568, + "grad_norm": 0.6081920862197876, + "learning_rate": 4.970737799733438e-05, + "loss": 3.0509, + "step": 18420 + }, + { + "epoch": 0.1503698272550202, + "grad_norm": 0.6237574815750122, + "learning_rate": 4.970639933989665e-05, + "loss": 3.0534, + "step": 18450 + }, + { + "epoch": 0.1506143310391747, + "grad_norm": 0.6470500230789185, + "learning_rate": 4.97054190583245e-05, + "loss": 3.0419, + "step": 18480 + }, + { + "epoch": 0.1508588348233292, + "grad_norm": 0.6340089440345764, + "learning_rate": 4.970443715268238e-05, + "loss": 3.0389, + "step": 18510 + }, + { + "epoch": 0.1511033386074837, + "grad_norm": 0.6185374855995178, + "learning_rate": 4.970345362303482e-05, + "loss": 3.0408, + "step": 18540 + }, + { + "epoch": 0.1513478423916382, + "grad_norm": 0.6287412643432617, + "learning_rate": 4.9702468469446496e-05, + "loss": 3.0409, + "step": 18570 + }, + { + "epoch": 0.15159234617579273, + "grad_norm": 0.651208221912384, + "learning_rate": 4.9701481691982157e-05, + "loss": 3.024, + "step": 18600 + }, + { + "epoch": 0.15183684995994723, + "grad_norm": 0.6494688987731934, + "learning_rate": 4.9700493290706665e-05, + "loss": 3.0333, + "step": 18630 + }, + { + "epoch": 0.15208135374410173, + "grad_norm": 0.6297596096992493, + "learning_rate": 4.969950326568501e-05, + "loss": 3.0369, + "step": 18660 + }, + { + "epoch": 0.15232585752825623, + "grad_norm": 0.6509076356887817, + "learning_rate": 4.969851161698227e-05, + "loss": 3.0342, + "step": 18690 + }, + { + "epoch": 0.15257036131241072, + "grad_norm": 0.6396616101264954, + "learning_rate": 4.969751834466363e-05, + "loss": 3.0346, + "step": 18720 + }, + { + "epoch": 0.15281486509656525, + "grad_norm": 0.603057324886322, + "learning_rate": 4.969652344879438e-05, + "loss": 3.0336, + "step": 18750 + }, + { + "epoch": 0.15305936888071975, + "grad_norm": 0.6266219019889832, + "learning_rate": 4.9695526929439925e-05, + "loss": 3.0433, + "step": 18780 + }, + { + "epoch": 0.15330387266487425, + "grad_norm": 0.6538950800895691, + "learning_rate": 4.969452878666578e-05, + "loss": 3.0271, + "step": 18810 + }, + { + "epoch": 0.15354837644902875, + "grad_norm": 0.6142851710319519, + "learning_rate": 4.969352902053756e-05, + "loss": 3.0436, + "step": 18840 + }, + { + "epoch": 0.15379288023318327, + "grad_norm": 0.6135523319244385, + "learning_rate": 4.9692527631120976e-05, + "loss": 3.0306, + "step": 18870 + }, + { + "epoch": 0.15403738401733777, + "grad_norm": 0.6318328976631165, + "learning_rate": 4.969152461848187e-05, + "loss": 3.0241, + "step": 18900 + }, + { + "epoch": 0.15428188780149227, + "grad_norm": 0.6185058355331421, + 
"learning_rate": 4.969051998268617e-05, + "loss": 3.0375, + "step": 18930 + }, + { + "epoch": 0.15452639158564677, + "grad_norm": 0.6477802991867065, + "learning_rate": 4.968951372379992e-05, + "loss": 3.0224, + "step": 18960 + }, + { + "epoch": 0.15477089536980126, + "grad_norm": 0.6331843733787537, + "learning_rate": 4.9688505841889266e-05, + "loss": 3.0114, + "step": 18990 + }, + { + "epoch": 0.1550153991539558, + "grad_norm": 0.6481664180755615, + "learning_rate": 4.968749633702047e-05, + "loss": 3.0443, + "step": 19020 + }, + { + "epoch": 0.1552599029381103, + "grad_norm": 0.6068332195281982, + "learning_rate": 4.968648520925988e-05, + "loss": 3.0362, + "step": 19050 + }, + { + "epoch": 0.1555044067222648, + "grad_norm": 0.6112623810768127, + "learning_rate": 4.9685472458673986e-05, + "loss": 3.0289, + "step": 19080 + }, + { + "epoch": 0.15574891050641929, + "grad_norm": 0.5991814136505127, + "learning_rate": 4.968445808532935e-05, + "loss": 3.0233, + "step": 19110 + }, + { + "epoch": 0.1559934142905738, + "grad_norm": 0.64973384141922, + "learning_rate": 4.9683442089292656e-05, + "loss": 3.0264, + "step": 19140 + }, + { + "epoch": 0.1562379180747283, + "grad_norm": 0.6454912424087524, + "learning_rate": 4.9682424470630685e-05, + "loss": 3.0168, + "step": 19170 + }, + { + "epoch": 0.1564824218588828, + "grad_norm": 0.6001042723655701, + "learning_rate": 4.968140522941035e-05, + "loss": 3.0211, + "step": 19200 + }, + { + "epoch": 0.1567269256430373, + "grad_norm": 0.6036773324012756, + "learning_rate": 4.9680384365698644e-05, + "loss": 3.0354, + "step": 19230 + }, + { + "epoch": 0.1569714294271918, + "grad_norm": 0.64256352186203, + "learning_rate": 4.967936187956268e-05, + "loss": 3.0359, + "step": 19260 + }, + { + "epoch": 0.15721593321134633, + "grad_norm": 0.6243559718132019, + "learning_rate": 4.967833777106966e-05, + "loss": 3.0307, + "step": 19290 + }, + { + "epoch": 0.15746043699550083, + "grad_norm": 0.6249656081199646, + "learning_rate": 4.9677312040286926e-05, + "loss": 2.9937, + "step": 19320 + }, + { + "epoch": 0.15770494077965533, + "grad_norm": 0.654472827911377, + "learning_rate": 4.967628468728189e-05, + "loss": 3.0364, + "step": 19350 + }, + { + "epoch": 0.15794944456380983, + "grad_norm": 0.6340649127960205, + "learning_rate": 4.9675255712122105e-05, + "loss": 3.0216, + "step": 19380 + }, + { + "epoch": 0.15819394834796432, + "grad_norm": 0.6119604110717773, + "learning_rate": 4.967422511487519e-05, + "loss": 3.0084, + "step": 19410 + }, + { + "epoch": 0.15843845213211885, + "grad_norm": 0.6218209266662598, + "learning_rate": 4.967319289560892e-05, + "loss": 3.0254, + "step": 19440 + }, + { + "epoch": 0.15868295591627335, + "grad_norm": 0.6509430408477783, + "learning_rate": 4.967215905439113e-05, + "loss": 3.0377, + "step": 19470 + }, + { + "epoch": 0.15892745970042785, + "grad_norm": 0.62874436378479, + "learning_rate": 4.967112359128979e-05, + "loss": 3.0205, + "step": 19500 + }, + { + "epoch": 0.15917196348458235, + "grad_norm": 0.6077570915222168, + "learning_rate": 4.967008650637297e-05, + "loss": 3.0021, + "step": 19530 + }, + { + "epoch": 0.15941646726873687, + "grad_norm": 0.6242923736572266, + "learning_rate": 4.9669047799708834e-05, + "loss": 3.0195, + "step": 19560 + }, + { + "epoch": 0.15966097105289137, + "grad_norm": 0.58917236328125, + "learning_rate": 4.966800747136568e-05, + "loss": 3.0002, + "step": 19590 + }, + { + "epoch": 0.15990547483704587, + "grad_norm": 0.611606776714325, + "learning_rate": 4.966696552141189e-05, + "loss": 3.015, + "step": 
19620 + }, + { + "epoch": 0.16014997862120037, + "grad_norm": 0.5987648367881775, + "learning_rate": 4.966592194991596e-05, + "loss": 3.0078, + "step": 19650 + }, + { + "epoch": 0.16039448240535487, + "grad_norm": 0.6070423126220703, + "learning_rate": 4.966487675694649e-05, + "loss": 3.0171, + "step": 19680 + }, + { + "epoch": 0.1606389861895094, + "grad_norm": 0.6137816905975342, + "learning_rate": 4.966382994257218e-05, + "loss": 3.0158, + "step": 19710 + }, + { + "epoch": 0.1608834899736639, + "grad_norm": 0.6102014183998108, + "learning_rate": 4.966278150686186e-05, + "loss": 2.9924, + "step": 19740 + }, + { + "epoch": 0.1611279937578184, + "grad_norm": 0.641494870185852, + "learning_rate": 4.9661731449884444e-05, + "loss": 2.9951, + "step": 19770 + }, + { + "epoch": 0.1613724975419729, + "grad_norm": 0.6112068295478821, + "learning_rate": 4.966067977170896e-05, + "loss": 3.0151, + "step": 19800 + }, + { + "epoch": 0.1616170013261274, + "grad_norm": 0.6172027587890625, + "learning_rate": 4.9659626472404545e-05, + "loss": 3.0018, + "step": 19830 + }, + { + "epoch": 0.1618615051102819, + "grad_norm": 0.6285238862037659, + "learning_rate": 4.965857155204044e-05, + "loss": 3.0134, + "step": 19860 + }, + { + "epoch": 0.1621060088944364, + "grad_norm": 0.6203986406326294, + "learning_rate": 4.965751501068598e-05, + "loss": 3.0134, + "step": 19890 + }, + { + "epoch": 0.1623505126785909, + "grad_norm": 0.6211456656455994, + "learning_rate": 4.965645684841065e-05, + "loss": 3.0258, + "step": 19920 + }, + { + "epoch": 0.1625950164627454, + "grad_norm": 0.5944324135780334, + "learning_rate": 4.965539706528398e-05, + "loss": 3.0115, + "step": 19950 + }, + { + "epoch": 0.16283952024689993, + "grad_norm": 0.640683650970459, + "learning_rate": 4.965433566137565e-05, + "loss": 3.0065, + "step": 19980 + }, + { + "epoch": 0.16308402403105443, + "grad_norm": 0.5876209139823914, + "learning_rate": 4.965327263675543e-05, + "loss": 3.0062, + "step": 20010 + }, + { + "epoch": 0.16332852781520893, + "grad_norm": 0.5797463655471802, + "learning_rate": 4.965220799149321e-05, + "loss": 2.9948, + "step": 20040 + }, + { + "epoch": 0.16357303159936343, + "grad_norm": 0.6228756308555603, + "learning_rate": 4.9651141725658955e-05, + "loss": 3.0144, + "step": 20070 + }, + { + "epoch": 0.16381753538351793, + "grad_norm": 0.6091254949569702, + "learning_rate": 4.9650073839322786e-05, + "loss": 3.0243, + "step": 20100 + }, + { + "epoch": 0.16406203916767245, + "grad_norm": 0.600600004196167, + "learning_rate": 4.964900433255489e-05, + "loss": 3.012, + "step": 20130 + }, + { + "epoch": 0.16430654295182695, + "grad_norm": 0.6189032196998596, + "learning_rate": 4.9647933205425574e-05, + "loss": 3.0066, + "step": 20160 + }, + { + "epoch": 0.16455104673598145, + "grad_norm": 0.6211121678352356, + "learning_rate": 4.9646860458005254e-05, + "loss": 3.0187, + "step": 20190 + }, + { + "epoch": 0.16479555052013595, + "grad_norm": 0.639061450958252, + "learning_rate": 4.9645786090364436e-05, + "loss": 3.0281, + "step": 20220 + }, + { + "epoch": 0.16504005430429047, + "grad_norm": 0.5996602177619934, + "learning_rate": 4.964471010257377e-05, + "loss": 2.9862, + "step": 20250 + }, + { + "epoch": 0.16528455808844497, + "grad_norm": 0.6031808257102966, + "learning_rate": 4.964363249470396e-05, + "loss": 3.0069, + "step": 20280 + }, + { + "epoch": 0.16552906187259947, + "grad_norm": 0.6389682292938232, + "learning_rate": 4.964255326682588e-05, + "loss": 3.0099, + "step": 20310 + }, + { + "epoch": 0.16577356565675397, + "grad_norm": 
0.6115438342094421, + "learning_rate": 4.964147241901045e-05, + "loss": 2.9926, + "step": 20340 + }, + { + "epoch": 0.16601806944090847, + "grad_norm": 0.5930183529853821, + "learning_rate": 4.964038995132872e-05, + "loss": 3.0108, + "step": 20370 + }, + { + "epoch": 0.166262573225063, + "grad_norm": 0.6384121775627136, + "learning_rate": 4.963930586385186e-05, + "loss": 3.002, + "step": 20400 + }, + { + "epoch": 0.1665070770092175, + "grad_norm": 0.6042916178703308, + "learning_rate": 4.963822015665114e-05, + "loss": 2.9867, + "step": 20430 + }, + { + "epoch": 0.166751580793372, + "grad_norm": 0.6278625130653381, + "learning_rate": 4.963713282979792e-05, + "loss": 2.9896, + "step": 20460 + }, + { + "epoch": 0.1669960845775265, + "grad_norm": 0.5804896354675293, + "learning_rate": 4.963604388336368e-05, + "loss": 3.0091, + "step": 20490 + }, + { + "epoch": 0.16724058836168101, + "grad_norm": 0.6057926416397095, + "learning_rate": 4.9634953317420006e-05, + "loss": 2.9925, + "step": 20520 + }, + { + "epoch": 0.1674850921458355, + "grad_norm": 0.6216057538986206, + "learning_rate": 4.963386113203859e-05, + "loss": 2.9662, + "step": 20550 + }, + { + "epoch": 0.16772959592999, + "grad_norm": 0.5599972605705261, + "learning_rate": 4.963276732729124e-05, + "loss": 2.9848, + "step": 20580 + }, + { + "epoch": 0.1679740997141445, + "grad_norm": 0.5946940183639526, + "learning_rate": 4.9631671903249835e-05, + "loss": 2.99, + "step": 20610 + }, + { + "epoch": 0.168218603498299, + "grad_norm": 0.663993239402771, + "learning_rate": 4.963057485998641e-05, + "loss": 2.9959, + "step": 20640 + }, + { + "epoch": 0.16846310728245353, + "grad_norm": 0.579962432384491, + "learning_rate": 4.962947619757307e-05, + "loss": 3.0081, + "step": 20670 + }, + { + "epoch": 0.16870761106660803, + "grad_norm": 0.6171576380729675, + "learning_rate": 4.962837591608203e-05, + "loss": 2.9801, + "step": 20700 + }, + { + "epoch": 0.16895211485076253, + "grad_norm": 0.6330801844596863, + "learning_rate": 4.962727401558563e-05, + "loss": 2.986, + "step": 20730 + }, + { + "epoch": 0.16919661863491703, + "grad_norm": 0.5701897740364075, + "learning_rate": 4.962617049615631e-05, + "loss": 2.9687, + "step": 20760 + }, + { + "epoch": 0.16944112241907153, + "grad_norm": 0.6304188966751099, + "learning_rate": 4.96250653578666e-05, + "loss": 2.9874, + "step": 20790 + }, + { + "epoch": 0.16968562620322605, + "grad_norm": 0.594829261302948, + "learning_rate": 4.962395860078917e-05, + "loss": 2.9743, + "step": 20820 + }, + { + "epoch": 0.16993012998738055, + "grad_norm": 0.6388981938362122, + "learning_rate": 4.9622850224996744e-05, + "loss": 2.9882, + "step": 20850 + }, + { + "epoch": 0.17017463377153505, + "grad_norm": 0.5935282111167908, + "learning_rate": 4.962174023056221e-05, + "loss": 2.9889, + "step": 20880 + }, + { + "epoch": 0.17041913755568955, + "grad_norm": 0.6028230786323547, + "learning_rate": 4.962062861755853e-05, + "loss": 3.0032, + "step": 20910 + }, + { + "epoch": 0.17066364133984407, + "grad_norm": 0.6068926453590393, + "learning_rate": 4.9619515386058775e-05, + "loss": 2.9902, + "step": 20940 + }, + { + "epoch": 0.17090814512399857, + "grad_norm": 0.5927624106407166, + "learning_rate": 4.9618400536136124e-05, + "loss": 2.993, + "step": 20970 + }, + { + "epoch": 0.17115264890815307, + "grad_norm": 0.5808185338973999, + "learning_rate": 4.9617284067863866e-05, + "loss": 2.9785, + "step": 21000 + }, + { + "epoch": 0.17139715269230757, + "grad_norm": 0.6751015186309814, + "learning_rate": 4.9616165981315397e-05, + "loss": 
2.987, + "step": 21030 + }, + { + "epoch": 0.17164165647646207, + "grad_norm": 0.6041668057441711, + "learning_rate": 4.961504627656422e-05, + "loss": 2.9953, + "step": 21060 + }, + { + "epoch": 0.1718861602606166, + "grad_norm": 0.6176959276199341, + "learning_rate": 4.961392495368393e-05, + "loss": 2.9975, + "step": 21090 + }, + { + "epoch": 0.1721306640447711, + "grad_norm": 0.6609038710594177, + "learning_rate": 4.961280201274826e-05, + "loss": 3.0082, + "step": 21120 + }, + { + "epoch": 0.1723751678289256, + "grad_norm": 0.6228735446929932, + "learning_rate": 4.9611677453831005e-05, + "loss": 2.9649, + "step": 21150 + }, + { + "epoch": 0.1726196716130801, + "grad_norm": 0.5892258882522583, + "learning_rate": 4.9610551277006115e-05, + "loss": 2.9809, + "step": 21180 + }, + { + "epoch": 0.1728641753972346, + "grad_norm": 0.6028584837913513, + "learning_rate": 4.96094234823476e-05, + "loss": 2.989, + "step": 21210 + }, + { + "epoch": 0.1731086791813891, + "grad_norm": 0.583513081073761, + "learning_rate": 4.9608294069929605e-05, + "loss": 2.9662, + "step": 21240 + }, + { + "epoch": 0.1733531829655436, + "grad_norm": 0.6293556094169617, + "learning_rate": 4.960716303982638e-05, + "loss": 2.9646, + "step": 21270 + }, + { + "epoch": 0.1735976867496981, + "grad_norm": 0.5999639630317688, + "learning_rate": 4.960603039211227e-05, + "loss": 2.9678, + "step": 21300 + }, + { + "epoch": 0.1738421905338526, + "grad_norm": 0.5838433504104614, + "learning_rate": 4.960489612686174e-05, + "loss": 2.9879, + "step": 21330 + }, + { + "epoch": 0.17408669431800713, + "grad_norm": 0.5837694406509399, + "learning_rate": 4.960376024414935e-05, + "loss": 2.9804, + "step": 21360 + }, + { + "epoch": 0.17433119810216163, + "grad_norm": 0.5874453186988831, + "learning_rate": 4.9602622744049765e-05, + "loss": 2.9765, + "step": 21390 + }, + { + "epoch": 0.17457570188631613, + "grad_norm": 0.6144499778747559, + "learning_rate": 4.960148362663777e-05, + "loss": 2.9787, + "step": 21420 + }, + { + "epoch": 0.17482020567047063, + "grad_norm": 0.6012053489685059, + "learning_rate": 4.960034289198824e-05, + "loss": 2.9816, + "step": 21450 + }, + { + "epoch": 0.17506470945462513, + "grad_norm": 0.6054373383522034, + "learning_rate": 4.959920054017616e-05, + "loss": 2.9876, + "step": 21480 + }, + { + "epoch": 0.17530921323877965, + "grad_norm": 0.5829117894172668, + "learning_rate": 4.959805657127664e-05, + "loss": 2.9821, + "step": 21510 + }, + { + "epoch": 0.17555371702293415, + "grad_norm": 0.5930308699607849, + "learning_rate": 4.959691098536487e-05, + "loss": 2.9861, + "step": 21540 + }, + { + "epoch": 0.17579822080708865, + "grad_norm": 0.5863514542579651, + "learning_rate": 4.959576378251617e-05, + "loss": 2.9819, + "step": 21570 + }, + { + "epoch": 0.17604272459124315, + "grad_norm": 0.6006020307540894, + "learning_rate": 4.959461496280594e-05, + "loss": 2.9804, + "step": 21600 + }, + { + "epoch": 0.17628722837539768, + "grad_norm": 0.5972082018852234, + "learning_rate": 4.95934645263097e-05, + "loss": 2.9772, + "step": 21630 + }, + { + "epoch": 0.17653173215955217, + "grad_norm": 0.5825967192649841, + "learning_rate": 4.9592312473103086e-05, + "loss": 2.9835, + "step": 21660 + }, + { + "epoch": 0.17677623594370667, + "grad_norm": 0.5810343623161316, + "learning_rate": 4.9591158803261834e-05, + "loss": 2.9717, + "step": 21690 + }, + { + "epoch": 0.17702073972786117, + "grad_norm": 0.5918793678283691, + "learning_rate": 4.9590003516861774e-05, + "loss": 2.9554, + "step": 21720 + }, + { + "epoch": 0.17726524351201567, 
+ "grad_norm": 0.5869923233985901, + "learning_rate": 4.958884661397885e-05, + "loss": 2.971, + "step": 21750 + }, + { + "epoch": 0.1775097472961702, + "grad_norm": 0.6326466202735901, + "learning_rate": 4.958768809468912e-05, + "loss": 2.9822, + "step": 21780 + }, + { + "epoch": 0.1777542510803247, + "grad_norm": 0.6132392883300781, + "learning_rate": 4.958652795906874e-05, + "loss": 2.9869, + "step": 21810 + }, + { + "epoch": 0.1779987548644792, + "grad_norm": 0.6292781829833984, + "learning_rate": 4.9585366207193984e-05, + "loss": 2.979, + "step": 21840 + }, + { + "epoch": 0.1782432586486337, + "grad_norm": 0.603531002998352, + "learning_rate": 4.95842028391412e-05, + "loss": 3.0038, + "step": 21870 + }, + { + "epoch": 0.1784877624327882, + "grad_norm": 0.6049748659133911, + "learning_rate": 4.958303785498689e-05, + "loss": 2.9919, + "step": 21900 + }, + { + "epoch": 0.17873226621694271, + "grad_norm": 0.6037519574165344, + "learning_rate": 4.958187125480761e-05, + "loss": 2.98, + "step": 21930 + }, + { + "epoch": 0.1789767700010972, + "grad_norm": 0.5910034775733948, + "learning_rate": 4.958070303868008e-05, + "loss": 2.9605, + "step": 21960 + }, + { + "epoch": 0.1792212737852517, + "grad_norm": 0.5778069496154785, + "learning_rate": 4.9579533206681066e-05, + "loss": 2.9668, + "step": 21990 + }, + { + "epoch": 0.1794657775694062, + "grad_norm": 0.5993695855140686, + "learning_rate": 4.957836175888749e-05, + "loss": 2.9568, + "step": 22020 + }, + { + "epoch": 0.17971028135356074, + "grad_norm": 0.6744295358657837, + "learning_rate": 4.957718869537635e-05, + "loss": 2.9903, + "step": 22050 + }, + { + "epoch": 0.17995478513771523, + "grad_norm": 0.6041865944862366, + "learning_rate": 4.957601401622477e-05, + "loss": 2.9791, + "step": 22080 + }, + { + "epoch": 0.18019928892186973, + "grad_norm": 0.6148335933685303, + "learning_rate": 4.957483772150996e-05, + "loss": 2.9856, + "step": 22110 + }, + { + "epoch": 0.18044379270602423, + "grad_norm": 0.6091467142105103, + "learning_rate": 4.957365981130925e-05, + "loss": 2.9485, + "step": 22140 + }, + { + "epoch": 0.18068829649017873, + "grad_norm": 0.6290934681892395, + "learning_rate": 4.957248028570007e-05, + "loss": 2.9712, + "step": 22170 + }, + { + "epoch": 0.18093280027433326, + "grad_norm": 0.570002019405365, + "learning_rate": 4.9571299144759965e-05, + "loss": 2.9787, + "step": 22200 + }, + { + "epoch": 0.18117730405848775, + "grad_norm": 0.5980287790298462, + "learning_rate": 4.957011638856658e-05, + "loss": 2.9696, + "step": 22230 + }, + { + "epoch": 0.18142180784264225, + "grad_norm": 0.6118261814117432, + "learning_rate": 4.956893201719766e-05, + "loss": 2.9635, + "step": 22260 + }, + { + "epoch": 0.18166631162679675, + "grad_norm": 0.578364372253418, + "learning_rate": 4.956774603073107e-05, + "loss": 2.9691, + "step": 22290 + }, + { + "epoch": 0.18191081541095128, + "grad_norm": 0.6028851270675659, + "learning_rate": 4.956655842924477e-05, + "loss": 2.9829, + "step": 22320 + }, + { + "epoch": 0.18215531919510577, + "grad_norm": 0.5682802200317383, + "learning_rate": 4.9565369212816825e-05, + "loss": 2.9643, + "step": 22350 + }, + { + "epoch": 0.18239982297926027, + "grad_norm": 0.611182689666748, + "learning_rate": 4.9564178381525414e-05, + "loss": 2.956, + "step": 22380 + }, + { + "epoch": 0.18264432676341477, + "grad_norm": 0.5692561864852905, + "learning_rate": 4.956298593544883e-05, + "loss": 2.9766, + "step": 22410 + }, + { + "epoch": 0.18288883054756927, + "grad_norm": 0.5628278255462646, + "learning_rate": 
4.9561791874665445e-05, + "loss": 2.9449, + "step": 22440 + }, + { + "epoch": 0.1831333343317238, + "grad_norm": 0.6359434723854065, + "learning_rate": 4.956059619925376e-05, + "loss": 2.9749, + "step": 22470 + }, + { + "epoch": 0.1833778381158783, + "grad_norm": 0.5779123306274414, + "learning_rate": 4.9559398909292376e-05, + "loss": 2.9668, + "step": 22500 + }, + { + "epoch": 0.1836223419000328, + "grad_norm": 0.5938737988471985, + "learning_rate": 4.955820000486e-05, + "loss": 2.9592, + "step": 22530 + }, + { + "epoch": 0.1838668456841873, + "grad_norm": 0.6009770035743713, + "learning_rate": 4.9556999486035444e-05, + "loss": 2.975, + "step": 22560 + }, + { + "epoch": 0.1841113494683418, + "grad_norm": 0.549861490726471, + "learning_rate": 4.9555797352897625e-05, + "loss": 2.9631, + "step": 22590 + }, + { + "epoch": 0.18435585325249632, + "grad_norm": 0.613752543926239, + "learning_rate": 4.955459360552558e-05, + "loss": 2.959, + "step": 22620 + }, + { + "epoch": 0.18460035703665081, + "grad_norm": 0.5959582924842834, + "learning_rate": 4.9553388243998424e-05, + "loss": 2.9752, + "step": 22650 + }, + { + "epoch": 0.1848448608208053, + "grad_norm": 0.5848221182823181, + "learning_rate": 4.9552181268395404e-05, + "loss": 2.9545, + "step": 22680 + }, + { + "epoch": 0.1850893646049598, + "grad_norm": 0.5932062864303589, + "learning_rate": 4.955097267879586e-05, + "loss": 2.9452, + "step": 22710 + }, + { + "epoch": 0.18533386838911434, + "grad_norm": 0.5847275853157043, + "learning_rate": 4.954976247527924e-05, + "loss": 2.9562, + "step": 22740 + }, + { + "epoch": 0.18557837217326884, + "grad_norm": 0.6085349917411804, + "learning_rate": 4.9548550657925095e-05, + "loss": 2.9498, + "step": 22770 + }, + { + "epoch": 0.18582287595742333, + "grad_norm": 0.5871360301971436, + "learning_rate": 4.954733722681311e-05, + "loss": 2.9585, + "step": 22800 + }, + { + "epoch": 0.18606737974157783, + "grad_norm": 0.5915745496749878, + "learning_rate": 4.954612218202303e-05, + "loss": 2.9549, + "step": 22830 + }, + { + "epoch": 0.18631188352573233, + "grad_norm": 0.5842469930648804, + "learning_rate": 4.954490552363473e-05, + "loss": 2.9667, + "step": 22860 + }, + { + "epoch": 0.18655638730988686, + "grad_norm": 0.5912492871284485, + "learning_rate": 4.95436872517282e-05, + "loss": 2.9672, + "step": 22890 + }, + { + "epoch": 0.18680089109404135, + "grad_norm": 0.558610200881958, + "learning_rate": 4.954246736638352e-05, + "loss": 2.9644, + "step": 22920 + }, + { + "epoch": 0.18704539487819585, + "grad_norm": 0.6107169985771179, + "learning_rate": 4.954124586768088e-05, + "loss": 2.955, + "step": 22950 + }, + { + "epoch": 0.18728989866235035, + "grad_norm": 0.6018122434616089, + "learning_rate": 4.9540022755700587e-05, + "loss": 2.963, + "step": 22980 + }, + { + "epoch": 0.18753440244650488, + "grad_norm": 0.6130113005638123, + "learning_rate": 4.953879803052304e-05, + "loss": 2.9665, + "step": 23010 + }, + { + "epoch": 0.18777890623065938, + "grad_norm": 0.6094121336936951, + "learning_rate": 4.9537571692228744e-05, + "loss": 2.9505, + "step": 23040 + }, + { + "epoch": 0.18802341001481387, + "grad_norm": 0.5776805877685547, + "learning_rate": 4.953634374089832e-05, + "loss": 2.9508, + "step": 23070 + }, + { + "epoch": 0.18826791379896837, + "grad_norm": 0.6227549314498901, + "learning_rate": 4.953511417661249e-05, + "loss": 2.9521, + "step": 23100 + }, + { + "epoch": 0.18851241758312287, + "grad_norm": 0.5928400754928589, + "learning_rate": 4.953388299945209e-05, + "loss": 2.9481, + "step": 23130 + }, + { + 
"epoch": 0.1887569213672774, + "grad_norm": 0.6355761289596558, + "learning_rate": 4.9532650209498045e-05, + "loss": 2.9619, + "step": 23160 + }, + { + "epoch": 0.1890014251514319, + "grad_norm": 0.5992570519447327, + "learning_rate": 4.9531415806831394e-05, + "loss": 2.9551, + "step": 23190 + }, + { + "epoch": 0.1892459289355864, + "grad_norm": 0.611517071723938, + "learning_rate": 4.953017979153329e-05, + "loss": 2.9727, + "step": 23220 + }, + { + "epoch": 0.1894904327197409, + "grad_norm": 0.5785388350486755, + "learning_rate": 4.952894216368498e-05, + "loss": 2.9751, + "step": 23250 + }, + { + "epoch": 0.1897349365038954, + "grad_norm": 0.5729740858078003, + "learning_rate": 4.952770292336782e-05, + "loss": 2.9428, + "step": 23280 + }, + { + "epoch": 0.18997944028804992, + "grad_norm": 0.5732846856117249, + "learning_rate": 4.9526462070663294e-05, + "loss": 2.9361, + "step": 23310 + }, + { + "epoch": 0.19022394407220442, + "grad_norm": 0.5825323462486267, + "learning_rate": 4.952521960565295e-05, + "loss": 2.9467, + "step": 23340 + }, + { + "epoch": 0.1904684478563589, + "grad_norm": 0.5768432021141052, + "learning_rate": 4.952397552841846e-05, + "loss": 2.9534, + "step": 23370 + }, + { + "epoch": 0.1907129516405134, + "grad_norm": 0.5852957367897034, + "learning_rate": 4.952272983904164e-05, + "loss": 2.9504, + "step": 23400 + }, + { + "epoch": 0.19095745542466794, + "grad_norm": 0.575799822807312, + "learning_rate": 4.9521482537604345e-05, + "loss": 2.9356, + "step": 23430 + }, + { + "epoch": 0.19120195920882244, + "grad_norm": 0.5817368626594543, + "learning_rate": 4.9520233624188585e-05, + "loss": 2.9553, + "step": 23460 + }, + { + "epoch": 0.19144646299297693, + "grad_norm": 0.5669567584991455, + "learning_rate": 4.951898309887646e-05, + "loss": 2.9629, + "step": 23490 + }, + { + "epoch": 0.19169096677713143, + "grad_norm": 0.6092284321784973, + "learning_rate": 4.951773096175016e-05, + "loss": 2.9396, + "step": 23520 + }, + { + "epoch": 0.19193547056128593, + "grad_norm": 0.5807421803474426, + "learning_rate": 4.9516477212892024e-05, + "loss": 2.9425, + "step": 23550 + }, + { + "epoch": 0.19217997434544046, + "grad_norm": 0.5865337252616882, + "learning_rate": 4.9515221852384455e-05, + "loss": 2.9476, + "step": 23580 + }, + { + "epoch": 0.19242447812959496, + "grad_norm": 0.583296000957489, + "learning_rate": 4.9513964880309974e-05, + "loss": 2.9411, + "step": 23610 + }, + { + "epoch": 0.19266898191374945, + "grad_norm": 0.5733707547187805, + "learning_rate": 4.951270629675121e-05, + "loss": 2.9603, + "step": 23640 + }, + { + "epoch": 0.19291348569790395, + "grad_norm": 0.5758892297744751, + "learning_rate": 4.951144610179092e-05, + "loss": 2.9481, + "step": 23670 + }, + { + "epoch": 0.19315798948205848, + "grad_norm": 0.5843274593353271, + "learning_rate": 4.9510184295511915e-05, + "loss": 2.9508, + "step": 23700 + }, + { + "epoch": 0.19340249326621298, + "grad_norm": 0.5948002338409424, + "learning_rate": 4.950892087799716e-05, + "loss": 2.9555, + "step": 23730 + }, + { + "epoch": 0.19364699705036748, + "grad_norm": 0.6155719757080078, + "learning_rate": 4.950765584932971e-05, + "loss": 2.957, + "step": 23760 + }, + { + "epoch": 0.19389150083452197, + "grad_norm": 0.5775892734527588, + "learning_rate": 4.950638920959273e-05, + "loss": 2.9414, + "step": 23790 + }, + { + "epoch": 0.19413600461867647, + "grad_norm": 0.5580487847328186, + "learning_rate": 4.950512095886947e-05, + "loss": 2.9214, + "step": 23820 + }, + { + "epoch": 0.194380508402831, + "grad_norm": 
0.5733426213264465, + "learning_rate": 4.950385109724331e-05, + "loss": 2.9431, + "step": 23850 + }, + { + "epoch": 0.1946250121869855, + "grad_norm": 0.5649263262748718, + "learning_rate": 4.950257962479773e-05, + "loss": 2.949, + "step": 23880 + }, + { + "epoch": 0.19486951597114, + "grad_norm": 0.5632926225662231, + "learning_rate": 4.95013065416163e-05, + "loss": 2.9438, + "step": 23910 + }, + { + "epoch": 0.1951140197552945, + "grad_norm": 0.5577961206436157, + "learning_rate": 4.950003184778272e-05, + "loss": 2.9253, + "step": 23940 + }, + { + "epoch": 0.195358523539449, + "grad_norm": 0.5746459364891052, + "learning_rate": 4.9498755543380795e-05, + "loss": 2.9316, + "step": 23970 + }, + { + "epoch": 0.19560302732360352, + "grad_norm": 0.5570378303527832, + "learning_rate": 4.949747762849441e-05, + "loss": 2.9356, + "step": 24000 + }, + { + "epoch": 0.19584753110775802, + "grad_norm": 0.5748524069786072, + "learning_rate": 4.949619810320757e-05, + "loss": 2.9405, + "step": 24030 + }, + { + "epoch": 0.19609203489191251, + "grad_norm": 0.5601252913475037, + "learning_rate": 4.9494916967604404e-05, + "loss": 2.9411, + "step": 24060 + }, + { + "epoch": 0.196336538676067, + "grad_norm": 0.6282344460487366, + "learning_rate": 4.949363422176911e-05, + "loss": 2.9335, + "step": 24090 + }, + { + "epoch": 0.19658104246022154, + "grad_norm": 0.5971565842628479, + "learning_rate": 4.9492349865786034e-05, + "loss": 2.9311, + "step": 24120 + }, + { + "epoch": 0.19682554624437604, + "grad_norm": 0.5764152407646179, + "learning_rate": 4.9491063899739596e-05, + "loss": 2.9343, + "step": 24150 + }, + { + "epoch": 0.19707005002853054, + "grad_norm": 0.5799585580825806, + "learning_rate": 4.948977632371432e-05, + "loss": 2.9357, + "step": 24180 + }, + { + "epoch": 0.19731455381268503, + "grad_norm": 0.6082672476768494, + "learning_rate": 4.948848713779487e-05, + "loss": 2.9373, + "step": 24210 + }, + { + "epoch": 0.19755905759683953, + "grad_norm": 0.5643133521080017, + "learning_rate": 4.948719634206598e-05, + "loss": 2.9291, + "step": 24240 + }, + { + "epoch": 0.19780356138099406, + "grad_norm": 0.5771824717521667, + "learning_rate": 4.9485903936612504e-05, + "loss": 2.969, + "step": 24270 + }, + { + "epoch": 0.19804806516514856, + "grad_norm": 0.5739302039146423, + "learning_rate": 4.948460992151941e-05, + "loss": 2.9263, + "step": 24300 + }, + { + "epoch": 0.19829256894930306, + "grad_norm": 0.5707888007164001, + "learning_rate": 4.948331429687175e-05, + "loss": 2.948, + "step": 24330 + }, + { + "epoch": 0.19853707273345755, + "grad_norm": 0.5753827095031738, + "learning_rate": 4.9482017062754704e-05, + "loss": 2.9476, + "step": 24360 + }, + { + "epoch": 0.19878157651761208, + "grad_norm": 0.5887372493743896, + "learning_rate": 4.9480718219253555e-05, + "loss": 2.9257, + "step": 24390 + }, + { + "epoch": 0.19902608030176658, + "grad_norm": 0.5863837599754333, + "learning_rate": 4.947941776645366e-05, + "loss": 2.9433, + "step": 24420 + }, + { + "epoch": 0.19927058408592108, + "grad_norm": 0.5440607070922852, + "learning_rate": 4.9478115704440545e-05, + "loss": 2.9332, + "step": 24450 + }, + { + "epoch": 0.19951508787007557, + "grad_norm": 0.5498874187469482, + "learning_rate": 4.947681203329977e-05, + "loss": 2.9388, + "step": 24480 + }, + { + "epoch": 0.19975959165423007, + "grad_norm": 0.5828962326049805, + "learning_rate": 4.947550675311706e-05, + "loss": 2.9198, + "step": 24510 + }, + { + "epoch": 0.2000040954383846, + "grad_norm": 0.5751544833183289, + "learning_rate": 4.94741998639782e-05, + 
"loss": 2.9326, + "step": 24540 + }, + { + "epoch": 0.2002485992225391, + "grad_norm": 0.5590956807136536, + "learning_rate": 4.947289136596912e-05, + "loss": 2.9443, + "step": 24570 + }, + { + "epoch": 0.2004931030066936, + "grad_norm": 0.575299084186554, + "learning_rate": 4.9471581259175816e-05, + "loss": 2.9392, + "step": 24600 + }, + { + "epoch": 0.2007376067908481, + "grad_norm": 0.5502616167068481, + "learning_rate": 4.9470269543684436e-05, + "loss": 2.9482, + "step": 24630 + }, + { + "epoch": 0.2009821105750026, + "grad_norm": 0.5680727958679199, + "learning_rate": 4.9468956219581186e-05, + "loss": 2.9367, + "step": 24660 + }, + { + "epoch": 0.20122661435915712, + "grad_norm": 0.6132157444953918, + "learning_rate": 4.946764128695242e-05, + "loss": 2.9319, + "step": 24690 + }, + { + "epoch": 0.20147111814331162, + "grad_norm": 0.5580316185951233, + "learning_rate": 4.9466324745884565e-05, + "loss": 2.9407, + "step": 24720 + }, + { + "epoch": 0.20171562192746612, + "grad_norm": 0.5621688961982727, + "learning_rate": 4.946500659646417e-05, + "loss": 2.9404, + "step": 24750 + }, + { + "epoch": 0.20196012571162061, + "grad_norm": 0.5874739289283752, + "learning_rate": 4.9463686838777887e-05, + "loss": 2.9338, + "step": 24780 + }, + { + "epoch": 0.20220462949577514, + "grad_norm": 0.575962483882904, + "learning_rate": 4.946236547291248e-05, + "loss": 2.9223, + "step": 24810 + }, + { + "epoch": 0.20244913327992964, + "grad_norm": 0.5589853525161743, + "learning_rate": 4.94610424989548e-05, + "loss": 2.9414, + "step": 24840 + }, + { + "epoch": 0.20269363706408414, + "grad_norm": 0.6121220588684082, + "learning_rate": 4.9459717916991834e-05, + "loss": 2.9061, + "step": 24870 + }, + { + "epoch": 0.20293814084823864, + "grad_norm": 0.5621205568313599, + "learning_rate": 4.945839172711063e-05, + "loss": 2.9375, + "step": 24900 + }, + { + "epoch": 0.20318264463239313, + "grad_norm": 0.5536648631095886, + "learning_rate": 4.9457063929398394e-05, + "loss": 2.9395, + "step": 24930 + }, + { + "epoch": 0.20342714841654766, + "grad_norm": 0.5474145412445068, + "learning_rate": 4.94557345239424e-05, + "loss": 2.9126, + "step": 24960 + }, + { + "epoch": 0.20367165220070216, + "grad_norm": 0.5837785601615906, + "learning_rate": 4.945440351083003e-05, + "loss": 2.9347, + "step": 24990 + }, + { + "epoch": 0.20391615598485666, + "grad_norm": 0.5928856134414673, + "learning_rate": 4.94530708901488e-05, + "loss": 2.9504, + "step": 25020 + }, + { + "epoch": 0.20416065976901115, + "grad_norm": 0.5786676406860352, + "learning_rate": 4.9451736661986306e-05, + "loss": 2.9265, + "step": 25050 + }, + { + "epoch": 0.20440516355316568, + "grad_norm": 0.5716553330421448, + "learning_rate": 4.945040082643026e-05, + "loss": 2.9159, + "step": 25080 + }, + { + "epoch": 0.20464966733732018, + "grad_norm": 0.676673412322998, + "learning_rate": 4.944906338356846e-05, + "loss": 2.9183, + "step": 25110 + }, + { + "epoch": 0.20489417112147468, + "grad_norm": 0.556983232498169, + "learning_rate": 4.944772433348884e-05, + "loss": 2.9248, + "step": 25140 + }, + { + "epoch": 0.20513867490562918, + "grad_norm": 0.5894055962562561, + "learning_rate": 4.9446383676279435e-05, + "loss": 2.9333, + "step": 25170 + }, + { + "epoch": 0.20538317868978367, + "grad_norm": 0.5735295414924622, + "learning_rate": 4.944504141202836e-05, + "loss": 2.9187, + "step": 25200 + } + ], + "logging_steps": 30, + "max_steps": 368091, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 100, + "stateful_callbacks": { + "TrainerControl": { + 
"args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 2.452144150020096e+19, + "train_batch_size": 2, + "trial_name": null, + "trial_params": null +}