|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.03993561807856905,
  "eval_steps": 100000000,
  "global_step": 4900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 8.15012613848348e-06,
      "grad_norm": 43.29102325439453,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 81.0533,
      "step": 1
    },
    {
      "epoch": 0.0002445037841545044,
      "grad_norm": 42.03493881225586,
      "learning_rate": 3e-06,
      "loss": 70.3325,
      "step": 30
    },
    {
      "epoch": 0.0004890075683090088,
      "grad_norm": 10.497812271118164,
      "learning_rate": 6e-06,
      "loss": 23.8844,
      "step": 60
    },
    {
      "epoch": 0.0007335113524635132,
      "grad_norm": 6.692718029022217,
      "learning_rate": 9e-06,
      "loss": 11.7198,
      "step": 90
    },
    {
      "epoch": 0.0009780151366180175,
      "grad_norm": 9.11446475982666,
      "learning_rate": 1.2e-05,
      "loss": 10.3199,
      "step": 120
    },
    {
      "epoch": 0.001222518920772522,
      "grad_norm": 16.92243766784668,
      "learning_rate": 1.5e-05,
      "loss": 9.5338,
      "step": 150
    },
    {
      "epoch": 0.0014670227049270264,
      "grad_norm": 12.929216384887695,
      "learning_rate": 1.8e-05,
      "loss": 8.9114,
      "step": 180
    },
    {
      "epoch": 0.0017115264890815308,
      "grad_norm": 8.830116271972656,
      "learning_rate": 2.1e-05,
      "loss": 8.3867,
      "step": 210
    },
    {
      "epoch": 0.001956030273236035,
      "grad_norm": 7.348124980926514,
      "learning_rate": 2.4e-05,
      "loss": 8.0113,
      "step": 240
    },
    {
      "epoch": 0.0022005340573905395,
      "grad_norm": 12.751787185668945,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 7.6093,
      "step": 270
    },
    {
      "epoch": 0.002445037841545044,
      "grad_norm": 15.10444164276123,
      "learning_rate": 3e-05,
      "loss": 7.3484,
      "step": 300
    },
    {
      "epoch": 0.0026895416256995483,
      "grad_norm": 6.653383731842041,
      "learning_rate": 3.3e-05,
      "loss": 7.2105,
      "step": 330
    },
    {
      "epoch": 0.0029340454098540527,
      "grad_norm": 6.986039161682129,
      "learning_rate": 3.6e-05,
      "loss": 7.0228,
      "step": 360
    },
    {
      "epoch": 0.003178549194008557,
      "grad_norm": 6.230088710784912,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 6.8948,
      "step": 390
    },
    {
      "epoch": 0.0034230529781630616,
      "grad_norm": 8.170981407165527,
      "learning_rate": 4.2e-05,
      "loss": 6.6965,
      "step": 420
    },
    {
      "epoch": 0.0036675567623175656,
      "grad_norm": 5.6345930099487305,
      "learning_rate": 4.5e-05,
      "loss": 6.5988,
      "step": 450
    },
    {
      "epoch": 0.00391206054647207,
      "grad_norm": 5.156513214111328,
      "learning_rate": 4.8e-05,
      "loss": 6.4795,
      "step": 480
    },
    {
      "epoch": 0.0041565643306265745,
      "grad_norm": 5.4964189529418945,
      "learning_rate": 4.999999990869806e-05,
      "loss": 6.333,
      "step": 510
    },
    {
      "epoch": 0.004401068114781079,
      "grad_norm": 3.67378306388855,
      "learning_rate": 4.999999853916893e-05,
      "loss": 6.2208,
      "step": 540
    },
    {
      "epoch": 0.004645571898935583,
      "grad_norm": 8.507222175598145,
      "learning_rate": 4.9999995526204936e-05,
      "loss": 6.1097,
      "step": 570
    },
    {
      "epoch": 0.004890075683090088,
      "grad_norm": 3.756618022918701,
      "learning_rate": 4.999999086980628e-05,
      "loss": 5.9886,
      "step": 600
    },
    {
      "epoch": 0.005134579467244592,
      "grad_norm": 3.8149781227111816,
      "learning_rate": 4.999998456997326e-05,
      "loss": 5.8779,
      "step": 630
    },
    {
      "epoch": 0.005379083251399097,
      "grad_norm": 3.840543270111084,
      "learning_rate": 4.999997662670628e-05,
      "loss": 5.805,
      "step": 660
    },
    {
      "epoch": 0.005623587035553601,
      "grad_norm": 3.4208931922912598,
      "learning_rate": 4.999996704000589e-05,
      "loss": 5.6992,
      "step": 690
    },
    {
      "epoch": 0.0058680908197081055,
      "grad_norm": 3.2975683212280273,
      "learning_rate": 4.99999558098727e-05,
      "loss": 5.6531,
      "step": 720
    },
    {
      "epoch": 0.00611259460386261,
      "grad_norm": 4.05631160736084,
      "learning_rate": 4.9999942936307445e-05,
      "loss": 5.554,
      "step": 750
    },
    {
      "epoch": 0.006357098388017114,
      "grad_norm": 3.1539864540100098,
      "learning_rate": 4.9999928419310994e-05,
      "loss": 5.4931,
      "step": 780
    },
    {
      "epoch": 0.006601602172171619,
      "grad_norm": 4.811732292175293,
      "learning_rate": 4.999991225888427e-05,
      "loss": 5.4204,
      "step": 810
    },
    {
      "epoch": 0.006846105956326123,
      "grad_norm": 2.9593210220336914,
      "learning_rate": 4.999989445502837e-05,
      "loss": 5.3687,
      "step": 840
    },
    {
      "epoch": 0.007090609740480627,
      "grad_norm": 3.942239284515381,
      "learning_rate": 4.9999875007744436e-05,
      "loss": 5.3238,
      "step": 870
    },
    {
      "epoch": 0.007335113524635131,
      "grad_norm": 2.29752254486084,
      "learning_rate": 4.9999853917033756e-05,
      "loss": 5.2423,
      "step": 900
    },
    {
      "epoch": 0.007579617308789636,
      "grad_norm": 2.243770122528076,
      "learning_rate": 4.999983118289773e-05,
      "loss": 5.2384,
      "step": 930
    },
    {
      "epoch": 0.00782412109294414,
      "grad_norm": 2.5572686195373535,
      "learning_rate": 4.999980680533782e-05,
      "loss": 5.1761,
      "step": 960
    },
    {
      "epoch": 0.008068624877098645,
      "grad_norm": 2.4739913940429688,
      "learning_rate": 4.999978078435567e-05,
      "loss": 5.1215,
      "step": 990
    },
    {
      "epoch": 0.008313128661253149,
      "grad_norm": 1.6927807331085205,
      "learning_rate": 4.999975311995295e-05,
      "loss": 5.1264,
      "step": 1020
    },
    {
      "epoch": 0.008557632445407654,
      "grad_norm": 2.3166885375976562,
      "learning_rate": 4.99997238121315e-05,
      "loss": 5.0469,
      "step": 1050
    },
    {
      "epoch": 0.008802136229562158,
      "grad_norm": 1.69430410861969,
      "learning_rate": 4.999969286089325e-05,
      "loss": 5.005,
      "step": 1080
    },
    {
      "epoch": 0.009046640013716663,
      "grad_norm": 2.0859012603759766,
      "learning_rate": 4.9999660266240235e-05,
      "loss": 4.9693,
      "step": 1110
    },
    {
      "epoch": 0.009291143797871167,
      "grad_norm": 1.7291619777679443,
      "learning_rate": 4.9999626028174585e-05,
      "loss": 4.9352,
      "step": 1140
    },
    {
      "epoch": 0.00953564758202567,
      "grad_norm": 1.5966330766677856,
      "learning_rate": 4.999959014669856e-05,
      "loss": 4.87,
      "step": 1170
    },
    {
      "epoch": 0.009780151366180176,
      "grad_norm": 1.9011143445968628,
      "learning_rate": 4.9999552621814513e-05,
      "loss": 4.8772,
      "step": 1200
    },
    {
      "epoch": 0.010024655150334679,
      "grad_norm": 1.701004981994629,
      "learning_rate": 4.9999513453524917e-05,
      "loss": 4.8484,
      "step": 1230
    },
    {
      "epoch": 0.010269158934489184,
      "grad_norm": 2.171808958053589,
      "learning_rate": 4.9999472641832336e-05,
      "loss": 4.8233,
      "step": 1260
    },
    {
      "epoch": 0.010513662718643688,
      "grad_norm": 1.4896094799041748,
      "learning_rate": 4.999943018673946e-05,
      "loss": 4.7921,
      "step": 1290
    },
    {
      "epoch": 0.010758166502798193,
      "grad_norm": 2.112971544265747,
      "learning_rate": 4.999938608824909e-05,
      "loss": 4.7908,
      "step": 1320
    },
    {
      "epoch": 0.011002670286952697,
      "grad_norm": 1.7113062143325806,
      "learning_rate": 4.999934034636411e-05,
      "loss": 4.7186,
      "step": 1350
    },
    {
      "epoch": 0.011247174071107202,
      "grad_norm": 2.099663019180298,
      "learning_rate": 4.999929296108753e-05,
      "loss": 4.7112,
      "step": 1380
    },
    {
      "epoch": 0.011491677855261706,
      "grad_norm": 1.40205979347229,
      "learning_rate": 4.9999243932422466e-05,
      "loss": 4.6701,
      "step": 1410
    },
    {
      "epoch": 0.011736181639416211,
      "grad_norm": 1.53098726272583,
      "learning_rate": 4.999919326037215e-05,
      "loss": 4.6786,
      "step": 1440
    },
    {
      "epoch": 0.011980685423570715,
      "grad_norm": 1.7461609840393066,
      "learning_rate": 4.99991409449399e-05,
      "loss": 4.6614,
      "step": 1470
    },
    {
      "epoch": 0.01222518920772522,
      "grad_norm": 1.6045819520950317,
      "learning_rate": 4.999908698612916e-05,
      "loss": 4.6233,
      "step": 1500
    },
    {
      "epoch": 0.012469692991879723,
      "grad_norm": 1.351077675819397,
      "learning_rate": 4.9999031383943486e-05,
      "loss": 4.626,
      "step": 1530
    },
    {
      "epoch": 0.012714196776034229,
      "grad_norm": 1.4363055229187012,
      "learning_rate": 4.999897413838651e-05,
      "loss": 4.5781,
      "step": 1560
    },
    {
      "epoch": 0.012958700560188732,
      "grad_norm": 1.1321336030960083,
      "learning_rate": 4.999891524946202e-05,
      "loss": 4.5492,
      "step": 1590
    },
    {
      "epoch": 0.013203204344343238,
      "grad_norm": 1.2755215167999268,
      "learning_rate": 4.999885471717387e-05,
      "loss": 4.5444,
      "step": 1620
    },
    {
      "epoch": 0.013447708128497741,
      "grad_norm": 1.471504807472229,
      "learning_rate": 4.999879254152605e-05,
      "loss": 4.5247,
      "step": 1650
    },
    {
      "epoch": 0.013692211912652246,
      "grad_norm": 1.3664900064468384,
      "learning_rate": 4.999872872252265e-05,
      "loss": 4.5158,
      "step": 1680
    },
    {
      "epoch": 0.01393671569680675,
      "grad_norm": 1.360611915588379,
      "learning_rate": 4.999866326016785e-05,
      "loss": 4.498,
      "step": 1710
    },
    {
      "epoch": 0.014181219480961254,
      "grad_norm": 1.43533456325531,
      "learning_rate": 4.999859615446596e-05,
      "loss": 4.5035,
      "step": 1740
    },
    {
      "epoch": 0.014425723265115759,
      "grad_norm": 1.4639947414398193,
      "learning_rate": 4.99985274054214e-05,
      "loss": 4.4533,
      "step": 1770
    },
    {
      "epoch": 0.014670227049270262,
      "grad_norm": 1.438585638999939,
      "learning_rate": 4.999845701303868e-05,
      "loss": 4.4484,
      "step": 1800
    },
    {
      "epoch": 0.014914730833424768,
      "grad_norm": 1.1868066787719727,
      "learning_rate": 4.999838497732243e-05,
      "loss": 4.4516,
      "step": 1830
    },
    {
      "epoch": 0.015159234617579271,
      "grad_norm": 1.1804672479629517,
      "learning_rate": 4.999831129827739e-05,
      "loss": 4.4378,
      "step": 1860
    },
    {
      "epoch": 0.015403738401733777,
      "grad_norm": 1.127652645111084,
      "learning_rate": 4.9998235975908394e-05,
      "loss": 4.4,
      "step": 1890
    },
    {
      "epoch": 0.01564824218588828,
      "grad_norm": 1.2617137432098389,
      "learning_rate": 4.99981590102204e-05,
      "loss": 4.3802,
      "step": 1920
    },
    {
      "epoch": 0.015892745970042785,
      "grad_norm": 1.4249097108840942,
      "learning_rate": 4.9998080401218464e-05,
      "loss": 4.3689,
      "step": 1950
    },
    {
      "epoch": 0.01613724975419729,
      "grad_norm": 1.0717036724090576,
      "learning_rate": 4.999800014890777e-05,
      "loss": 4.3645,
      "step": 1980
    },
    {
      "epoch": 0.016381753538351793,
      "grad_norm": 1.414404273033142,
      "learning_rate": 4.9997918253293555e-05,
      "loss": 4.343,
      "step": 2010
    },
    {
      "epoch": 0.016626257322506298,
      "grad_norm": 1.1941649913787842,
      "learning_rate": 4.999783471438124e-05,
      "loss": 4.3271,
      "step": 2040
    },
    {
      "epoch": 0.016870761106660803,
      "grad_norm": 1.0737369060516357,
      "learning_rate": 4.999774953217631e-05,
      "loss": 4.3223,
      "step": 2070
    },
    {
      "epoch": 0.01711526489081531,
      "grad_norm": 1.2367453575134277,
      "learning_rate": 4.9997662706684345e-05,
      "loss": 4.3106,
      "step": 2100
    },
    {
      "epoch": 0.01735976867496981,
      "grad_norm": 1.0919674634933472,
      "learning_rate": 4.999757423791107e-05,
      "loss": 4.2916,
      "step": 2130
    },
    {
      "epoch": 0.017604272459124316,
      "grad_norm": 1.2348527908325195,
      "learning_rate": 4.9997484125862306e-05,
      "loss": 4.2935,
      "step": 2160
    },
    {
      "epoch": 0.01784877624327882,
      "grad_norm": 1.173093318939209,
      "learning_rate": 4.999739237054395e-05,
      "loss": 4.2761,
      "step": 2190
    },
    {
      "epoch": 0.018093280027433326,
      "grad_norm": 1.1982988119125366,
      "learning_rate": 4.9997298971962065e-05,
      "loss": 4.2619,
      "step": 2220
    },
    {
      "epoch": 0.018337783811587828,
      "grad_norm": 1.4401408433914185,
      "learning_rate": 4.999720393012277e-05,
      "loss": 4.2672,
      "step": 2250
    },
    {
      "epoch": 0.018582287595742333,
      "grad_norm": 1.0311753749847412,
      "learning_rate": 4.999710724503233e-05,
      "loss": 4.2272,
      "step": 2280
    },
    {
      "epoch": 0.01882679137989684,
      "grad_norm": 1.4242304563522339,
      "learning_rate": 4.9997008916697075e-05,
      "loss": 4.2415,
      "step": 2310
    },
    {
      "epoch": 0.01907129516405134,
      "grad_norm": 1.039548397064209,
      "learning_rate": 4.999690894512349e-05,
      "loss": 4.2086,
      "step": 2340
    },
    {
      "epoch": 0.019315798948205846,
      "grad_norm": 1.1213486194610596,
      "learning_rate": 4.999680733031814e-05,
      "loss": 4.2043,
      "step": 2370
    },
    {
      "epoch": 0.01956030273236035,
      "grad_norm": 1.0591011047363281,
      "learning_rate": 4.9996704072287716e-05,
      "loss": 4.1894,
      "step": 2400
    },
    {
      "epoch": 0.019804806516514856,
      "grad_norm": 1.0503829717636108,
      "learning_rate": 4.9996599171038984e-05,
      "loss": 4.184,
      "step": 2430
    },
    {
      "epoch": 0.020049310300669358,
      "grad_norm": 1.068516492843628,
      "learning_rate": 4.999649262657886e-05,
      "loss": 4.1918,
      "step": 2460
    },
    {
      "epoch": 0.020293814084823864,
      "grad_norm": 0.9620047211647034,
      "learning_rate": 4.999638443891434e-05,
      "loss": 4.182,
      "step": 2490
    },
    {
      "epoch": 0.02053831786897837,
      "grad_norm": 0.9728343486785889,
      "learning_rate": 4.999627460805253e-05,
      "loss": 4.1746,
      "step": 2520
    },
    {
      "epoch": 0.020782821653132874,
      "grad_norm": 0.990840494632721,
      "learning_rate": 4.999616313400066e-05,
      "loss": 4.1339,
      "step": 2550
    },
    {
      "epoch": 0.021027325437287376,
      "grad_norm": 1.0678601264953613,
      "learning_rate": 4.999605001676605e-05,
      "loss": 4.1548,
      "step": 2580
    },
    {
      "epoch": 0.02127182922144188,
      "grad_norm": 1.1837236881256104,
      "learning_rate": 4.9995935256356144e-05,
      "loss": 4.1508,
      "step": 2610
    },
    {
      "epoch": 0.021516333005596387,
      "grad_norm": 1.0381749868392944,
      "learning_rate": 4.9995818852778476e-05,
      "loss": 4.0977,
      "step": 2640
    },
    {
      "epoch": 0.021760836789750892,
      "grad_norm": 0.9649094343185425,
      "learning_rate": 4.999570080604071e-05,
      "loss": 4.1405,
      "step": 2670
    },
    {
      "epoch": 0.022005340573905394,
      "grad_norm": 1.1858196258544922,
      "learning_rate": 4.99955811161506e-05,
      "loss": 4.1242,
      "step": 2700
    },
    {
      "epoch": 0.0222498443580599,
      "grad_norm": 1.035134196281433,
      "learning_rate": 4.9995459783116004e-05,
      "loss": 4.1042,
      "step": 2730
    },
    {
      "epoch": 0.022494348142214404,
      "grad_norm": 1.0874470472335815,
      "learning_rate": 4.999533680694493e-05,
      "loss": 4.0951,
      "step": 2760
    },
    {
      "epoch": 0.02273885192636891,
      "grad_norm": 1.5923190116882324,
      "learning_rate": 4.9995212187645416e-05,
      "loss": 4.1031,
      "step": 2790
    },
    {
      "epoch": 0.02298335571052341,
      "grad_norm": 0.935354471206665,
      "learning_rate": 4.9995085925225693e-05,
      "loss": 4.0804,
      "step": 2820
    },
    {
      "epoch": 0.023227859494677917,
      "grad_norm": 1.6059459447860718,
      "learning_rate": 4.999495801969404e-05,
      "loss": 4.0703,
      "step": 2850
    },
    {
      "epoch": 0.023472363278832422,
      "grad_norm": 1.1116045713424683,
      "learning_rate": 4.9994828471058876e-05,
      "loss": 4.0579,
      "step": 2880
    },
    {
      "epoch": 0.023716867062986924,
      "grad_norm": 1.0068392753601074,
      "learning_rate": 4.9994697279328714e-05,
      "loss": 4.0635,
      "step": 2910
    },
    {
      "epoch": 0.02396137084714143,
      "grad_norm": 0.9715657234191895,
      "learning_rate": 4.9994564444512176e-05,
      "loss": 4.068,
      "step": 2940
    },
    {
      "epoch": 0.024205874631295934,
      "grad_norm": 1.1550418138504028,
      "learning_rate": 4.9994429966618e-05,
      "loss": 4.0416,
      "step": 2970
    },
    {
      "epoch": 0.02445037841545044,
      "grad_norm": 1.1463241577148438,
      "learning_rate": 4.999429384565502e-05,
      "loss": 4.0224,
      "step": 3000
    },
    {
      "epoch": 0.02469488219960494,
      "grad_norm": 0.9614786505699158,
      "learning_rate": 4.999415608163217e-05,
      "loss": 4.0092,
      "step": 3030
    },
    {
      "epoch": 0.024939385983759447,
      "grad_norm": 1.0928614139556885,
      "learning_rate": 4.999401667455854e-05,
      "loss": 4.0083,
      "step": 3060
    },
    {
      "epoch": 0.025183889767913952,
      "grad_norm": 1.0884851217269897,
      "learning_rate": 4.9993875624443274e-05,
      "loss": 4.0251,
      "step": 3090
    },
    {
      "epoch": 0.025428393552068457,
      "grad_norm": 1.0158635377883911,
      "learning_rate": 4.9993732931295646e-05,
      "loss": 3.9629,
      "step": 3120
    },
    {
      "epoch": 0.02567289733622296,
      "grad_norm": 1.0032926797866821,
      "learning_rate": 4.999358859512503e-05,
      "loss": 4.0015,
      "step": 3150
    },
    {
      "epoch": 0.025917401120377465,
      "grad_norm": 0.9303460121154785,
      "learning_rate": 4.9993442615940936e-05,
      "loss": 4.0066,
      "step": 3180
    },
    {
      "epoch": 0.02616190490453197,
      "grad_norm": 1.0750280618667603,
      "learning_rate": 4.999329499375292e-05,
      "loss": 3.9937,
      "step": 3210
    },
    {
      "epoch": 0.026406408688686475,
      "grad_norm": 1.2123857736587524,
      "learning_rate": 4.999314572857074e-05,
      "loss": 3.9891,
      "step": 3240
    },
    {
      "epoch": 0.026650912472840977,
      "grad_norm": 0.988030731678009,
      "learning_rate": 4.9992994820404174e-05,
      "loss": 3.9779,
      "step": 3270
    },
    {
      "epoch": 0.026895416256995482,
      "grad_norm": 1.0025092363357544,
      "learning_rate": 4.999284226926314e-05,
      "loss": 3.9684,
      "step": 3300
    },
    {
      "epoch": 0.027139920041149988,
      "grad_norm": 0.9497402310371399,
      "learning_rate": 4.999268807515768e-05,
      "loss": 3.9542,
      "step": 3330
    },
    {
      "epoch": 0.027384423825304493,
      "grad_norm": 0.9898300766944885,
      "learning_rate": 4.999253223809792e-05,
      "loss": 3.9462,
      "step": 3360
    },
    {
      "epoch": 0.027628927609458995,
      "grad_norm": 0.8951054215431213,
      "learning_rate": 4.999237475809411e-05,
      "loss": 3.9535,
      "step": 3390
    },
    {
      "epoch": 0.0278734313936135,
      "grad_norm": 1.1410372257232666,
      "learning_rate": 4.99922156351566e-05,
      "loss": 3.9701,
      "step": 3420
    },
    {
      "epoch": 0.028117935177768005,
      "grad_norm": 0.9642576575279236,
      "learning_rate": 4.999205486929586e-05,
      "loss": 3.9532,
      "step": 3450
    },
    {
      "epoch": 0.028362438961922507,
      "grad_norm": 0.9304039478302002,
      "learning_rate": 4.999189246052245e-05,
      "loss": 3.9335,
      "step": 3480
    },
    {
      "epoch": 0.028606942746077012,
      "grad_norm": 0.9684301614761353,
      "learning_rate": 4.999172840884704e-05,
      "loss": 3.9289,
      "step": 3510
    },
    {
      "epoch": 0.028851446530231518,
      "grad_norm": 0.9566449522972107,
      "learning_rate": 4.999156271428043e-05,
      "loss": 3.92,
      "step": 3540
    },
    {
      "epoch": 0.029095950314386023,
      "grad_norm": 0.9796332120895386,
      "learning_rate": 4.9991395376833496e-05,
      "loss": 3.9173,
      "step": 3570
    },
    {
      "epoch": 0.029340454098540525,
      "grad_norm": 0.939052164554596,
      "learning_rate": 4.999122639651725e-05,
      "loss": 3.8938,
      "step": 3600
    },
    {
      "epoch": 0.02958495788269503,
      "grad_norm": 0.8773409724235535,
      "learning_rate": 4.9991055773342795e-05,
      "loss": 3.9225,
      "step": 3630
    },
    {
      "epoch": 0.029829461666849535,
      "grad_norm": 0.9443736672401428,
      "learning_rate": 4.9990883507321354e-05,
      "loss": 3.8889,
      "step": 3660
    },
    {
      "epoch": 0.03007396545100404,
      "grad_norm": 0.9129781723022461,
      "learning_rate": 4.999070959846424e-05,
      "loss": 3.894,
      "step": 3690
    },
    {
      "epoch": 0.030318469235158543,
      "grad_norm": 0.988441526889801,
      "learning_rate": 4.999053404678289e-05,
      "loss": 3.8968,
      "step": 3720
    },
    {
      "epoch": 0.030562973019313048,
      "grad_norm": 1.0134508609771729,
      "learning_rate": 4.999035685228884e-05,
      "loss": 3.873,
      "step": 3750
    },
    {
      "epoch": 0.030807476803467553,
      "grad_norm": 0.9853084087371826,
      "learning_rate": 4.999017801499375e-05,
      "loss": 3.8921,
      "step": 3780
    },
    {
      "epoch": 0.03105198058762206,
      "grad_norm": 1.0263316631317139,
      "learning_rate": 4.998999753490937e-05,
      "loss": 3.8691,
      "step": 3810
    },
    {
      "epoch": 0.03129648437177656,
      "grad_norm": 1.2496927976608276,
      "learning_rate": 4.998981541204757e-05,
      "loss": 3.8631,
      "step": 3840
    },
    {
      "epoch": 0.031540988155931066,
      "grad_norm": 1.2647733688354492,
      "learning_rate": 4.998963164642031e-05,
      "loss": 3.8676,
      "step": 3870
    },
    {
      "epoch": 0.03178549194008557,
      "grad_norm": 0.8532811403274536,
      "learning_rate": 4.9989446238039676e-05,
      "loss": 3.8196,
      "step": 3900
    },
    {
      "epoch": 0.032029995724240076,
      "grad_norm": 0.9394932985305786,
      "learning_rate": 4.998925918691786e-05,
      "loss": 3.8464,
      "step": 3930
    },
    {
      "epoch": 0.03227449950839458,
      "grad_norm": 0.9216808676719666,
      "learning_rate": 4.998907049306715e-05,
      "loss": 3.8294,
      "step": 3960
    },
    {
      "epoch": 0.03251900329254909,
      "grad_norm": 0.9735797047615051,
      "learning_rate": 4.998888015649996e-05,
      "loss": 3.8412,
      "step": 3990
    },
    {
      "epoch": 0.032763507076703585,
      "grad_norm": 0.9803655743598938,
      "learning_rate": 4.99886881772288e-05,
      "loss": 3.834,
      "step": 4020
    },
    {
      "epoch": 0.03300801086085809,
      "grad_norm": 0.8677938580513,
      "learning_rate": 4.998849455526628e-05,
      "loss": 3.8325,
      "step": 4050
    },
    {
      "epoch": 0.033252514645012596,
      "grad_norm": 1.0530720949172974,
      "learning_rate": 4.998829929062515e-05,
      "loss": 3.8125,
      "step": 4080
    },
    {
      "epoch": 0.0334970184291671,
      "grad_norm": 0.9517835378646851,
      "learning_rate": 4.998810238331822e-05,
      "loss": 3.8079,
      "step": 4110
    },
    {
      "epoch": 0.033741522213321606,
      "grad_norm": 0.9056320190429688,
      "learning_rate": 4.998790383335845e-05,
      "loss": 3.8023,
      "step": 4140
    },
    {
      "epoch": 0.03398602599747611,
      "grad_norm": 1.042631983757019,
      "learning_rate": 4.9987703640758894e-05,
      "loss": 3.8089,
      "step": 4170
    },
    {
      "epoch": 0.03423052978163062,
      "grad_norm": 1.003548502922058,
      "learning_rate": 4.99875018055327e-05,
      "loss": 3.8088,
      "step": 4200
    },
    {
      "epoch": 0.034475033565785115,
      "grad_norm": 0.9754064083099365,
      "learning_rate": 4.998729832769315e-05,
      "loss": 3.7969,
      "step": 4230
    },
    {
      "epoch": 0.03471953734993962,
      "grad_norm": 0.9784180521965027,
      "learning_rate": 4.998709320725361e-05,
      "loss": 3.8066,
      "step": 4260
    },
    {
      "epoch": 0.034964041134094126,
      "grad_norm": 0.9522731900215149,
      "learning_rate": 4.998688644422756e-05,
      "loss": 3.8062,
      "step": 4290
    },
    {
      "epoch": 0.03520854491824863,
      "grad_norm": 1.0362435579299927,
      "learning_rate": 4.998667803862861e-05,
      "loss": 3.8007,
      "step": 4320
    },
    {
      "epoch": 0.03545304870240314,
      "grad_norm": 0.899608850479126,
      "learning_rate": 4.9986467990470445e-05,
      "loss": 3.8013,
      "step": 4350
    },
    {
      "epoch": 0.03569755248655764,
      "grad_norm": 0.9725902676582336,
      "learning_rate": 4.998625629976688e-05,
      "loss": 3.7688,
      "step": 4380
    },
    {
      "epoch": 0.03594205627071215,
      "grad_norm": 0.9904030561447144,
      "learning_rate": 4.998604296653182e-05,
      "loss": 3.7503,
      "step": 4410
    },
    {
      "epoch": 0.03618656005486665,
      "grad_norm": 0.9177742004394531,
      "learning_rate": 4.99858279907793e-05,
      "loss": 3.7664,
      "step": 4440
    },
    {
      "epoch": 0.03643106383902115,
      "grad_norm": 0.964356005191803,
      "learning_rate": 4.998561137252346e-05,
      "loss": 3.7574,
      "step": 4470
    },
    {
      "epoch": 0.036675567623175656,
      "grad_norm": 1.0006535053253174,
      "learning_rate": 4.9985393111778525e-05,
      "loss": 3.7616,
      "step": 4500
    },
    {
      "epoch": 0.03692007140733016,
      "grad_norm": 0.9334198236465454,
      "learning_rate": 4.998517320855884e-05,
      "loss": 3.776,
      "step": 4530
    },
    {
      "epoch": 0.03716457519148467,
      "grad_norm": 0.9192038774490356,
      "learning_rate": 4.998495166287887e-05,
      "loss": 3.7552,
      "step": 4560
    },
    {
      "epoch": 0.03740907897563917,
      "grad_norm": 0.9278284311294556,
      "learning_rate": 4.998472847475318e-05,
      "loss": 3.7508,
      "step": 4590
    },
    {
      "epoch": 0.03765358275979368,
      "grad_norm": 0.870951771736145,
      "learning_rate": 4.998450364419643e-05,
      "loss": 3.7419,
      "step": 4620
    },
    {
      "epoch": 0.03789808654394818,
      "grad_norm": 0.8986937403678894,
      "learning_rate": 4.998427717122342e-05,
      "loss": 3.7285,
      "step": 4650
    },
    {
      "epoch": 0.03814259032810268,
      "grad_norm": 0.8922073841094971,
      "learning_rate": 4.9984049055849024e-05,
      "loss": 3.7252,
      "step": 4680
    },
    {
      "epoch": 0.038387094112257186,
      "grad_norm": 0.8716844916343689,
      "learning_rate": 4.9983819298088234e-05,
      "loss": 3.7243,
      "step": 4710
    },
    {
      "epoch": 0.03863159789641169,
      "grad_norm": 0.9074581861495972,
      "learning_rate": 4.9983587897956166e-05,
      "loss": 3.7254,
      "step": 4740
    },
    {
      "epoch": 0.0388761016805662,
      "grad_norm": 0.8903953433036804,
      "learning_rate": 4.998335485546802e-05,
      "loss": 3.7363,
      "step": 4770
    },
    {
      "epoch": 0.0391206054647207,
      "grad_norm": 1.0858160257339478,
      "learning_rate": 4.998312017063912e-05,
      "loss": 3.708,
      "step": 4800
    },
    {
      "epoch": 0.03936510924887521,
      "grad_norm": 1.1008403301239014,
      "learning_rate": 4.9982883843484895e-05,
      "loss": 3.727,
      "step": 4830
    },
    {
      "epoch": 0.03960961303302971,
      "grad_norm": 0.9895632863044739,
      "learning_rate": 4.998264587402088e-05,
      "loss": 3.7383,
      "step": 4860
    },
    {
      "epoch": 0.03985411681718422,
      "grad_norm": 1.0174016952514648,
      "learning_rate": 4.9982406262262726e-05,
      "loss": 3.6967,
      "step": 4890
    }
  ],
  "logging_steps": 30,
  "max_steps": 368091,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 4.76805806948352e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|