|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.6188396756082346, |
|
"eval_steps": 248, |
|
"global_step": 496, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0012476606363069245, |
|
"grad_norm": 0.3369293212890625, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.4366, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002495321272613849, |
|
"grad_norm": 0.36413154006004333, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.4996, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0037429819089207735, |
|
"grad_norm": 0.3281562626361847, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.5808, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004990642545227698, |
|
"grad_norm": 0.20739880204200745, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.5504, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.006238303181534623, |
|
"grad_norm": 0.3129860460758209, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.6271, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.007485963817841547, |
|
"grad_norm": 0.2682516574859619, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.5808, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.008733624454148471, |
|
"grad_norm": 0.2960676848888397, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.5214, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.009981285090455396, |
|
"grad_norm": 0.1701919436454773, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.5882, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.011228945726762321, |
|
"grad_norm": 0.2361917495727539, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.5905, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.012476606363069246, |
|
"grad_norm": 0.30789047479629517, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.4745, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01372426699937617, |
|
"grad_norm": 0.3127429187297821, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.5997, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.014971927635683094, |
|
"grad_norm": 0.4279906451702118, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.4534, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.016219588271990017, |
|
"grad_norm": 0.1982978880405426, |
|
"learning_rate": 5.2e-06, |
|
"loss": 1.5678, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.017467248908296942, |
|
"grad_norm": 0.35204875469207764, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 1.5609, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.018714909544603867, |
|
"grad_norm": 0.3509620130062103, |
|
"learning_rate": 6e-06, |
|
"loss": 1.6769, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.019962570180910792, |
|
"grad_norm": 0.3329513370990753, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 1.4596, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.021210230817217717, |
|
"grad_norm": 0.2940622568130493, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 1.4965, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.022457891453524642, |
|
"grad_norm": 0.2941367030143738, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 1.498, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.023705552089831567, |
|
"grad_norm": 0.41950124502182007, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 1.5426, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.024953212726138492, |
|
"grad_norm": 0.22718903422355652, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.5473, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.026200873362445413, |
|
"grad_norm": 0.3167245388031006, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 1.5591, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02744853399875234, |
|
"grad_norm": 0.24035727977752686, |
|
"learning_rate": 8.8e-06, |
|
"loss": 1.5008, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.028696194635059263, |
|
"grad_norm": 0.32235047221183777, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 1.6414, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02994385527136619, |
|
"grad_norm": 0.27163419127464294, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 1.534, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.031191515907673113, |
|
"grad_norm": 0.19682233035564423, |
|
"learning_rate": 1e-05, |
|
"loss": 1.839, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.032439176543980035, |
|
"grad_norm": 0.19141852855682373, |
|
"learning_rate": 1.04e-05, |
|
"loss": 1.4647, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03368683718028696, |
|
"grad_norm": 0.19754448533058167, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 1.5247, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.034934497816593885, |
|
"grad_norm": 0.21717581152915955, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 1.4311, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03618215845290081, |
|
"grad_norm": 0.18977436423301697, |
|
"learning_rate": 1.16e-05, |
|
"loss": 1.4796, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.037429819089207735, |
|
"grad_norm": 0.20318299531936646, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.4515, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03867747972551466, |
|
"grad_norm": 0.18727326393127441, |
|
"learning_rate": 1.2400000000000002e-05, |
|
"loss": 1.5346, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.039925140361821584, |
|
"grad_norm": 0.2555878460407257, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 1.4717, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.041172800998128506, |
|
"grad_norm": 0.2864621579647064, |
|
"learning_rate": 1.3200000000000002e-05, |
|
"loss": 1.2702, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.042420461634435434, |
|
"grad_norm": 0.14290520548820496, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 1.4323, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.043668122270742356, |
|
"grad_norm": 0.14048892259597778, |
|
"learning_rate": 1.4e-05, |
|
"loss": 1.4812, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.044915782907049284, |
|
"grad_norm": 0.12439600378274918, |
|
"learning_rate": 1.4400000000000001e-05, |
|
"loss": 1.3209, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.046163443543356206, |
|
"grad_norm": 0.11550629138946533, |
|
"learning_rate": 1.48e-05, |
|
"loss": 1.2587, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.047411104179663134, |
|
"grad_norm": 0.1708095371723175, |
|
"learning_rate": 1.5200000000000002e-05, |
|
"loss": 1.4372, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.048658764815970056, |
|
"grad_norm": 0.12109667807817459, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 1.3698, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.049906425452276984, |
|
"grad_norm": 0.3102639615535736, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.2817, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.051154086088583905, |
|
"grad_norm": 0.08972104638814926, |
|
"learning_rate": 1.64e-05, |
|
"loss": 1.2466, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05240174672489083, |
|
"grad_norm": 0.09888940304517746, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 1.4295, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.053649407361197755, |
|
"grad_norm": 0.09405261278152466, |
|
"learning_rate": 1.72e-05, |
|
"loss": 1.3466, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.05489706799750468, |
|
"grad_norm": 0.08513078093528748, |
|
"learning_rate": 1.76e-05, |
|
"loss": 1.4428, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.056144728633811605, |
|
"grad_norm": 0.09294164925813675, |
|
"learning_rate": 1.8e-05, |
|
"loss": 1.0991, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05739238927011853, |
|
"grad_norm": 0.09842169284820557, |
|
"learning_rate": 1.8400000000000003e-05, |
|
"loss": 1.1395, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.058640049906425455, |
|
"grad_norm": 0.09997382760047913, |
|
"learning_rate": 1.88e-05, |
|
"loss": 1.2846, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.05988771054273238, |
|
"grad_norm": 0.08888943493366241, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 1.2464, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.0611353711790393, |
|
"grad_norm": 0.08796142041683197, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 1.2413, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06238303181534623, |
|
"grad_norm": 0.10694792866706848, |
|
"learning_rate": 2e-05, |
|
"loss": 1.1867, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06363069245165315, |
|
"grad_norm": 0.13788695633411407, |
|
"learning_rate": 1.999999108696051e-05, |
|
"loss": 1.1745, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06487835308796007, |
|
"grad_norm": 0.10375945270061493, |
|
"learning_rate": 1.999996434785792e-05, |
|
"loss": 1.1885, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.066126013724267, |
|
"grad_norm": 0.10151001065969467, |
|
"learning_rate": 1.9999919782739897e-05, |
|
"loss": 1.1994, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.06737367436057393, |
|
"grad_norm": 0.10222937166690826, |
|
"learning_rate": 1.999985739168588e-05, |
|
"loss": 1.1777, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.06862133499688085, |
|
"grad_norm": 0.10297010838985443, |
|
"learning_rate": 1.9999777174807097e-05, |
|
"loss": 1.0911, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.06986899563318777, |
|
"grad_norm": 0.1051320731639862, |
|
"learning_rate": 1.9999679132246536e-05, |
|
"loss": 1.1694, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07111665626949469, |
|
"grad_norm": 0.13086318969726562, |
|
"learning_rate": 1.999956326417897e-05, |
|
"loss": 1.2241, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07236431690580163, |
|
"grad_norm": 0.11484101414680481, |
|
"learning_rate": 1.9999429570810946e-05, |
|
"loss": 1.0468, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07361197754210855, |
|
"grad_norm": 0.11556559056043625, |
|
"learning_rate": 1.999927805238079e-05, |
|
"loss": 1.0311, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.07485963817841547, |
|
"grad_norm": 0.11713112890720367, |
|
"learning_rate": 1.9999108709158594e-05, |
|
"loss": 1.0457, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07610729881472239, |
|
"grad_norm": 0.1211632490158081, |
|
"learning_rate": 1.9998921541446235e-05, |
|
"loss": 1.0551, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07735495945102933, |
|
"grad_norm": 0.12310254573822021, |
|
"learning_rate": 1.999871654957736e-05, |
|
"loss": 1.082, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07860262008733625, |
|
"grad_norm": 0.1967160552740097, |
|
"learning_rate": 1.9998493733917385e-05, |
|
"loss": 1.1005, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.07985028072364317, |
|
"grad_norm": 0.12039731442928314, |
|
"learning_rate": 1.999825309486351e-05, |
|
"loss": 1.1583, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08109794135995009, |
|
"grad_norm": 0.1448286473751068, |
|
"learning_rate": 1.999799463284469e-05, |
|
"loss": 0.9149, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08234560199625701, |
|
"grad_norm": 0.13981443643569946, |
|
"learning_rate": 1.9997718348321672e-05, |
|
"loss": 0.9094, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08359326263256395, |
|
"grad_norm": 0.16553014516830444, |
|
"learning_rate": 1.9997424241786956e-05, |
|
"loss": 0.9401, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08484092326887087, |
|
"grad_norm": 0.16061550378799438, |
|
"learning_rate": 1.999711231376482e-05, |
|
"loss": 0.6806, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08608858390517779, |
|
"grad_norm": 0.157967671751976, |
|
"learning_rate": 1.999678256481131e-05, |
|
"loss": 0.7469, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08733624454148471, |
|
"grad_norm": 0.2233240157365799, |
|
"learning_rate": 1.9996434995514242e-05, |
|
"loss": 0.7202, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08858390517779165, |
|
"grad_norm": 0.18542423844337463, |
|
"learning_rate": 1.999606960649319e-05, |
|
"loss": 0.9828, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.08983156581409857, |
|
"grad_norm": 0.18575641512870789, |
|
"learning_rate": 1.9995686398399505e-05, |
|
"loss": 0.6079, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09107922645040549, |
|
"grad_norm": 0.15296655893325806, |
|
"learning_rate": 1.9995285371916294e-05, |
|
"loss": 0.5708, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09232688708671241, |
|
"grad_norm": 0.21740297973155975, |
|
"learning_rate": 1.9994866527758427e-05, |
|
"loss": 0.8158, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09357454772301933, |
|
"grad_norm": 0.13831280171871185, |
|
"learning_rate": 1.9994429866672543e-05, |
|
"loss": 0.7365, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09482220835932627, |
|
"grad_norm": 0.12471487373113632, |
|
"learning_rate": 1.999397538943704e-05, |
|
"loss": 0.7776, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09606986899563319, |
|
"grad_norm": 0.11238758265972137, |
|
"learning_rate": 1.9993503096862066e-05, |
|
"loss": 0.591, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.09731752963194011, |
|
"grad_norm": 0.12608778476715088, |
|
"learning_rate": 1.999301298978954e-05, |
|
"loss": 0.6285, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09856519026824703, |
|
"grad_norm": 0.12795987725257874, |
|
"learning_rate": 1.9992505069093124e-05, |
|
"loss": 0.4695, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.09981285090455397, |
|
"grad_norm": 0.14311431348323822, |
|
"learning_rate": 1.9991979335678242e-05, |
|
"loss": 0.7291, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.10106051154086089, |
|
"grad_norm": 0.12212368845939636, |
|
"learning_rate": 1.999143579048208e-05, |
|
"loss": 0.7634, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.10230817217716781, |
|
"grad_norm": 0.0974789410829544, |
|
"learning_rate": 1.9990874434473554e-05, |
|
"loss": 0.4982, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.10355583281347473, |
|
"grad_norm": 0.14408168196678162, |
|
"learning_rate": 1.999029526865335e-05, |
|
"loss": 0.5957, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.10480349344978165, |
|
"grad_norm": 0.09892342239618301, |
|
"learning_rate": 1.9989698294053892e-05, |
|
"loss": 0.5647, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.10605115408608859, |
|
"grad_norm": 0.10291016846895218, |
|
"learning_rate": 1.9989083511739348e-05, |
|
"loss": 0.6392, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.10729881472239551, |
|
"grad_norm": 0.15446709096431732, |
|
"learning_rate": 1.9988450922805635e-05, |
|
"loss": 0.5549, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.10854647535870243, |
|
"grad_norm": 0.08281419426202774, |
|
"learning_rate": 1.998780052838041e-05, |
|
"loss": 0.5886, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.10979413599500935, |
|
"grad_norm": 0.09740785509347916, |
|
"learning_rate": 1.998713232962308e-05, |
|
"loss": 0.6006, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11104179663131628, |
|
"grad_norm": 0.09569819271564484, |
|
"learning_rate": 1.998644632772477e-05, |
|
"loss": 0.5268, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11228945726762321, |
|
"grad_norm": 0.08716901391744614, |
|
"learning_rate": 1.998574252390836e-05, |
|
"loss": 0.5426, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11353711790393013, |
|
"grad_norm": 0.08949075639247894, |
|
"learning_rate": 1.998502091942845e-05, |
|
"loss": 0.5637, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11478477854023705, |
|
"grad_norm": 0.10287146270275116, |
|
"learning_rate": 1.9984281515571383e-05, |
|
"loss": 0.4135, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.11603243917654397, |
|
"grad_norm": 0.09598784893751144, |
|
"learning_rate": 1.9983524313655225e-05, |
|
"loss": 0.5652, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11728009981285091, |
|
"grad_norm": 0.07580853998661041, |
|
"learning_rate": 1.998274931502977e-05, |
|
"loss": 0.5787, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.11852776044915783, |
|
"grad_norm": 0.0887116864323616, |
|
"learning_rate": 1.998195652107653e-05, |
|
"loss": 0.5494, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.11977542108546475, |
|
"grad_norm": 0.09782399237155914, |
|
"learning_rate": 1.998114593320876e-05, |
|
"loss": 0.4127, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12102308172177167, |
|
"grad_norm": 0.09533873945474625, |
|
"learning_rate": 1.9980317552871407e-05, |
|
"loss": 0.5192, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.1222707423580786, |
|
"grad_norm": 0.08982633054256439, |
|
"learning_rate": 1.9979471381541158e-05, |
|
"loss": 0.4699, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12351840299438553, |
|
"grad_norm": 0.0871262177824974, |
|
"learning_rate": 1.9978607420726396e-05, |
|
"loss": 0.5772, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.12476606363069245, |
|
"grad_norm": 0.08864148706197739, |
|
"learning_rate": 1.997772567196723e-05, |
|
"loss": 0.6495, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1260137242669994, |
|
"grad_norm": 0.5612615942955017, |
|
"learning_rate": 1.997682613683547e-05, |
|
"loss": 0.3912, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.1272613849033063, |
|
"grad_norm": 0.18454571068286896, |
|
"learning_rate": 1.997590881693464e-05, |
|
"loss": 0.6133, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.12850904553961323, |
|
"grad_norm": 0.11318469792604446, |
|
"learning_rate": 1.997497371389995e-05, |
|
"loss": 0.4185, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.12975670617592014, |
|
"grad_norm": 0.09561355412006378, |
|
"learning_rate": 1.9974020829398335e-05, |
|
"loss": 0.5241, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13100436681222707, |
|
"grad_norm": 0.1466185599565506, |
|
"learning_rate": 1.9973050165128406e-05, |
|
"loss": 0.559, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.132252027448534, |
|
"grad_norm": 0.0754123404622078, |
|
"learning_rate": 1.9972061722820483e-05, |
|
"loss": 0.4629, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13349968808484092, |
|
"grad_norm": 0.10411768406629562, |
|
"learning_rate": 1.997105550423656e-05, |
|
"loss": 0.3849, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.13474734872114785, |
|
"grad_norm": 0.09193742275238037, |
|
"learning_rate": 1.997003151117034e-05, |
|
"loss": 0.3919, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.13599500935745476, |
|
"grad_norm": 0.16121919453144073, |
|
"learning_rate": 1.9968989745447203e-05, |
|
"loss": 0.6253, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.1372426699937617, |
|
"grad_norm": 0.09475496411323547, |
|
"learning_rate": 1.99679302089242e-05, |
|
"loss": 0.5236, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.13849033063006863, |
|
"grad_norm": 0.09533608704805374, |
|
"learning_rate": 1.9966852903490077e-05, |
|
"loss": 0.4389, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.13973799126637554, |
|
"grad_norm": 0.08322104066610336, |
|
"learning_rate": 1.996575783106524e-05, |
|
"loss": 0.5455, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14098565190268247, |
|
"grad_norm": 0.1089191734790802, |
|
"learning_rate": 1.9964644993601782e-05, |
|
"loss": 0.4437, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14223331253898938, |
|
"grad_norm": 0.117027148604393, |
|
"learning_rate": 1.9963514393083452e-05, |
|
"loss": 0.4787, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.14348097317529632, |
|
"grad_norm": 0.08248787373304367, |
|
"learning_rate": 1.9962366031525663e-05, |
|
"loss": 0.4845, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.14472863381160325, |
|
"grad_norm": 0.08650378882884979, |
|
"learning_rate": 1.9961199910975497e-05, |
|
"loss": 0.3742, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.14597629444791016, |
|
"grad_norm": 0.07683917135000229, |
|
"learning_rate": 1.9960016033511692e-05, |
|
"loss": 0.4035, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1472239550842171, |
|
"grad_norm": 0.08383751660585403, |
|
"learning_rate": 1.9958814401244634e-05, |
|
"loss": 0.5135, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.14847161572052403, |
|
"grad_norm": 0.12660759687423706, |
|
"learning_rate": 1.995759501631637e-05, |
|
"loss": 0.6356, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.14971927635683094, |
|
"grad_norm": 0.09009061008691788, |
|
"learning_rate": 1.995635788090057e-05, |
|
"loss": 0.4618, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15096693699313787, |
|
"grad_norm": 0.10818025469779968, |
|
"learning_rate": 1.9955102997202575e-05, |
|
"loss": 0.5522, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15221459762944478, |
|
"grad_norm": 0.07673201709985733, |
|
"learning_rate": 1.9953830367459343e-05, |
|
"loss": 0.3801, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.15346225826575172, |
|
"grad_norm": 0.09158243238925934, |
|
"learning_rate": 1.9952539993939478e-05, |
|
"loss": 0.413, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.15470991890205865, |
|
"grad_norm": 0.11646990478038788, |
|
"learning_rate": 1.9951231878943207e-05, |
|
"loss": 0.4732, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.15595757953836556, |
|
"grad_norm": 0.0766897052526474, |
|
"learning_rate": 1.994990602480239e-05, |
|
"loss": 0.4355, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1572052401746725, |
|
"grad_norm": 0.10383328795433044, |
|
"learning_rate": 1.9948562433880498e-05, |
|
"loss": 0.433, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.1584529008109794, |
|
"grad_norm": 0.07712090015411377, |
|
"learning_rate": 1.9947201108572636e-05, |
|
"loss": 0.4848, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.15970056144728634, |
|
"grad_norm": 0.14091262221336365, |
|
"learning_rate": 1.994582205130551e-05, |
|
"loss": 0.439, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16094822208359327, |
|
"grad_norm": 0.10989029705524445, |
|
"learning_rate": 1.9944425264537432e-05, |
|
"loss": 0.4429, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16219588271990018, |
|
"grad_norm": 0.1106753721833229, |
|
"learning_rate": 1.9943010750758335e-05, |
|
"loss": 0.4662, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16344354335620712, |
|
"grad_norm": 0.09498463571071625, |
|
"learning_rate": 1.994157851248973e-05, |
|
"loss": 0.4164, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.16469120399251402, |
|
"grad_norm": 0.0848456621170044, |
|
"learning_rate": 1.9940128552284756e-05, |
|
"loss": 0.4634, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.16593886462882096, |
|
"grad_norm": 0.06310152262449265, |
|
"learning_rate": 1.99386608727281e-05, |
|
"loss": 0.4834, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.1671865252651279, |
|
"grad_norm": 0.10514630377292633, |
|
"learning_rate": 1.9937175476436078e-05, |
|
"loss": 0.4392, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.1684341859014348, |
|
"grad_norm": 0.47956305742263794, |
|
"learning_rate": 1.9935672366056557e-05, |
|
"loss": 0.4975, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.16968184653774174, |
|
"grad_norm": 0.12076468020677567, |
|
"learning_rate": 1.9934151544269e-05, |
|
"loss": 0.425, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17092950717404864, |
|
"grad_norm": 0.07710213214159012, |
|
"learning_rate": 1.9932613013784436e-05, |
|
"loss": 0.4426, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17217716781035558, |
|
"grad_norm": 0.1181124597787857, |
|
"learning_rate": 1.993105677734546e-05, |
|
"loss": 0.4272, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.17342482844666252, |
|
"grad_norm": 0.08521714061498642, |
|
"learning_rate": 1.9929482837726227e-05, |
|
"loss": 0.4041, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.17467248908296942, |
|
"grad_norm": 0.0910830870270729, |
|
"learning_rate": 1.992789119773246e-05, |
|
"loss": 0.4372, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17592014971927636, |
|
"grad_norm": 0.09005699306726456, |
|
"learning_rate": 1.9926281860201427e-05, |
|
"loss": 0.3363, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1771678103555833, |
|
"grad_norm": 0.1397104263305664, |
|
"learning_rate": 1.9924654828001947e-05, |
|
"loss": 0.5186, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1784154709918902, |
|
"grad_norm": 0.0795428678393364, |
|
"learning_rate": 1.9923010104034382e-05, |
|
"loss": 0.4258, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.17966313162819714, |
|
"grad_norm": 0.11801055818796158, |
|
"learning_rate": 1.9921347691230622e-05, |
|
"loss": 0.4102, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18091079226450404, |
|
"grad_norm": 0.09254780411720276, |
|
"learning_rate": 1.9919667592554107e-05, |
|
"loss": 0.4247, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18215845290081098, |
|
"grad_norm": 0.09013550728559494, |
|
"learning_rate": 1.991796981099979e-05, |
|
"loss": 0.4397, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.18340611353711792, |
|
"grad_norm": 0.10347352921962738, |
|
"learning_rate": 1.991625434959415e-05, |
|
"loss": 0.3864, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.18465377417342482, |
|
"grad_norm": 0.08151372522115707, |
|
"learning_rate": 1.991452121139518e-05, |
|
"loss": 0.3866, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.18590143480973176, |
|
"grad_norm": 0.07901383191347122, |
|
"learning_rate": 1.9912770399492385e-05, |
|
"loss": 0.4413, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.18714909544603867, |
|
"grad_norm": 0.10809416323900223, |
|
"learning_rate": 1.9911001917006782e-05, |
|
"loss": 0.4892, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1883967560823456, |
|
"grad_norm": 0.0716921016573906, |
|
"learning_rate": 1.990921576709088e-05, |
|
"loss": 0.377, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.18964441671865254, |
|
"grad_norm": 0.07814662158489227, |
|
"learning_rate": 1.9907411952928676e-05, |
|
"loss": 0.3738, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19089207735495944, |
|
"grad_norm": 0.0741223469376564, |
|
"learning_rate": 1.9905590477735677e-05, |
|
"loss": 0.4715, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19213973799126638, |
|
"grad_norm": 0.11747987568378448, |
|
"learning_rate": 1.990375134475885e-05, |
|
"loss": 0.3903, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.1933873986275733, |
|
"grad_norm": 0.08143888413906097, |
|
"learning_rate": 1.9901894557276647e-05, |
|
"loss": 0.4747, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.19463505926388022, |
|
"grad_norm": 0.08514941483736038, |
|
"learning_rate": 1.9900020118598996e-05, |
|
"loss": 0.4365, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.19588271990018716, |
|
"grad_norm": 0.07622472196817398, |
|
"learning_rate": 1.9898128032067287e-05, |
|
"loss": 0.4311, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.19713038053649407, |
|
"grad_norm": 0.09147831797599792, |
|
"learning_rate": 1.9896218301054365e-05, |
|
"loss": 0.4376, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.198378041172801, |
|
"grad_norm": 0.09080328047275543, |
|
"learning_rate": 1.9894290928964536e-05, |
|
"loss": 0.4817, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.19962570180910794, |
|
"grad_norm": 0.0705234482884407, |
|
"learning_rate": 1.9892345919233543e-05, |
|
"loss": 0.3602, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.20087336244541484, |
|
"grad_norm": 0.3075416088104248, |
|
"learning_rate": 1.9890383275328577e-05, |
|
"loss": 0.3686, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.20212102308172178, |
|
"grad_norm": 0.0864262729883194, |
|
"learning_rate": 1.9888403000748268e-05, |
|
"loss": 0.5076, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.2033686837180287, |
|
"grad_norm": 0.07593845576047897, |
|
"learning_rate": 1.988640509902266e-05, |
|
"loss": 0.3319, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.20461634435433562, |
|
"grad_norm": 0.0860196128487587, |
|
"learning_rate": 1.988438957371324e-05, |
|
"loss": 0.4247, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.20586400499064256, |
|
"grad_norm": 0.08619983494281769, |
|
"learning_rate": 1.9882356428412887e-05, |
|
"loss": 0.4515, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.20711166562694946, |
|
"grad_norm": 0.08582403510808945, |
|
"learning_rate": 1.988030566674591e-05, |
|
"loss": 0.4861, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2083593262632564, |
|
"grad_norm": 0.09421785920858383, |
|
"learning_rate": 1.9878237292368014e-05, |
|
"loss": 0.3536, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2096069868995633, |
|
"grad_norm": 0.08474161475896835, |
|
"learning_rate": 1.9876151308966293e-05, |
|
"loss": 0.4128, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.21085464753587024, |
|
"grad_norm": 0.07902742177248001, |
|
"learning_rate": 1.987404772025924e-05, |
|
"loss": 0.3909, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21210230817217718, |
|
"grad_norm": 0.12060698121786118, |
|
"learning_rate": 1.987192652999673e-05, |
|
"loss": 0.357, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2133499688084841, |
|
"grad_norm": 0.06981324404478073, |
|
"learning_rate": 1.9869787741960013e-05, |
|
"loss": 0.4047, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.21459762944479102, |
|
"grad_norm": 0.11325976252555847, |
|
"learning_rate": 1.9867631359961712e-05, |
|
"loss": 0.4094, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.21584529008109793, |
|
"grad_norm": 0.07985086739063263, |
|
"learning_rate": 1.9865457387845806e-05, |
|
"loss": 0.4056, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.21709295071740486, |
|
"grad_norm": 0.08571935445070267, |
|
"learning_rate": 1.9863265829487637e-05, |
|
"loss": 0.3637, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2183406113537118, |
|
"grad_norm": 0.11255648732185364, |
|
"learning_rate": 1.9861056688793896e-05, |
|
"loss": 0.4475, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.2195882719900187, |
|
"grad_norm": 0.08018800616264343, |
|
"learning_rate": 1.985882996970261e-05, |
|
"loss": 0.4584, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22083593262632564, |
|
"grad_norm": 0.07600877434015274, |
|
"learning_rate": 1.985658567618315e-05, |
|
"loss": 0.3794, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.22208359326263255, |
|
"grad_norm": 0.07584904879331589, |
|
"learning_rate": 1.985432381223621e-05, |
|
"loss": 0.3718, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.22333125389893949, |
|
"grad_norm": 0.07508600503206253, |
|
"learning_rate": 1.9852044381893806e-05, |
|
"loss": 0.4392, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.22457891453524642, |
|
"grad_norm": 0.09708955883979797, |
|
"learning_rate": 1.9849747389219272e-05, |
|
"loss": 0.3792, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.22582657517155333, |
|
"grad_norm": 0.11540783196687698, |
|
"learning_rate": 1.984743283830724e-05, |
|
"loss": 0.4075, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.22707423580786026, |
|
"grad_norm": 0.08834560215473175, |
|
"learning_rate": 1.984510073328365e-05, |
|
"loss": 0.3469, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.2283218964441672, |
|
"grad_norm": 0.09154046326875687, |
|
"learning_rate": 1.9842751078305735e-05, |
|
"loss": 0.411, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.2295695570804741, |
|
"grad_norm": 0.07195434719324112, |
|
"learning_rate": 1.9840383877561998e-05, |
|
"loss": 0.385, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23081721771678104, |
|
"grad_norm": 0.10304310917854309, |
|
"learning_rate": 1.9837999135272235e-05, |
|
"loss": 0.3727, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.23206487835308795, |
|
"grad_norm": 0.0923156812787056, |
|
"learning_rate": 1.983559685568751e-05, |
|
"loss": 0.4777, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.23331253898939489, |
|
"grad_norm": 0.07710360735654831, |
|
"learning_rate": 1.983317704309014e-05, |
|
"loss": 0.3419, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.23456019962570182, |
|
"grad_norm": 0.072215236723423, |
|
"learning_rate": 1.9830739701793705e-05, |
|
"loss": 0.342, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.23580786026200873, |
|
"grad_norm": 0.0766647532582283, |
|
"learning_rate": 1.982828483614303e-05, |
|
"loss": 0.3672, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.23705552089831566, |
|
"grad_norm": 0.07266882061958313, |
|
"learning_rate": 1.9825812450514174e-05, |
|
"loss": 0.4228, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23830318153462257, |
|
"grad_norm": 0.06986986100673676, |
|
"learning_rate": 1.9823322549314433e-05, |
|
"loss": 0.3741, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.2395508421709295, |
|
"grad_norm": 0.08523140847682953, |
|
"learning_rate": 1.9820815136982327e-05, |
|
"loss": 0.4154, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.24079850280723644, |
|
"grad_norm": 0.07692936062812805, |
|
"learning_rate": 1.9818290217987587e-05, |
|
"loss": 0.4984, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.24204616344354335, |
|
"grad_norm": 0.11332568526268005, |
|
"learning_rate": 1.9815747796831154e-05, |
|
"loss": 0.377, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.24329382407985028, |
|
"grad_norm": 0.06331691890954971, |
|
"learning_rate": 1.9813187878045164e-05, |
|
"loss": 0.3134, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.2445414847161572, |
|
"grad_norm": 0.09051994234323502, |
|
"learning_rate": 1.9810610466192955e-05, |
|
"loss": 0.3818, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.24578914535246413, |
|
"grad_norm": 0.09058686345815659, |
|
"learning_rate": 1.980801556586904e-05, |
|
"loss": 0.35, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.24703680598877106, |
|
"grad_norm": 0.07872316241264343, |
|
"learning_rate": 1.9805403181699104e-05, |
|
"loss": 0.4229, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.24828446662507797, |
|
"grad_norm": 0.07484429329633713, |
|
"learning_rate": 1.980277331834001e-05, |
|
"loss": 0.4025, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.2495321272613849, |
|
"grad_norm": 0.08303457498550415, |
|
"learning_rate": 1.9800125980479768e-05, |
|
"loss": 0.4001, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25077978789769184, |
|
"grad_norm": 0.06768237799406052, |
|
"learning_rate": 1.9797461172837545e-05, |
|
"loss": 0.2898, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.2520274485339988, |
|
"grad_norm": 0.10813165456056595, |
|
"learning_rate": 1.9794778900163653e-05, |
|
"loss": 0.3832, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.25327510917030566, |
|
"grad_norm": 0.07230770587921143, |
|
"learning_rate": 1.9792079167239526e-05, |
|
"loss": 0.4103, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.2545227698066126, |
|
"grad_norm": 0.07701678574085236, |
|
"learning_rate": 1.978936197887773e-05, |
|
"loss": 0.3438, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.2557704304429195, |
|
"grad_norm": 0.07222580909729004, |
|
"learning_rate": 1.978662733992195e-05, |
|
"loss": 0.3693, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.25701809107922646, |
|
"grad_norm": 0.07187530398368835, |
|
"learning_rate": 1.9783875255246972e-05, |
|
"loss": 0.3753, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.2582657517155334, |
|
"grad_norm": 0.06922349333763123, |
|
"learning_rate": 1.9781105729758688e-05, |
|
"loss": 0.4076, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.2595134123518403, |
|
"grad_norm": 0.07130351662635803, |
|
"learning_rate": 1.9778318768394072e-05, |
|
"loss": 0.411, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.2607610729881472, |
|
"grad_norm": 0.07194481790065765, |
|
"learning_rate": 1.9775514376121184e-05, |
|
"loss": 0.3342, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.26200873362445415, |
|
"grad_norm": 0.13812008500099182, |
|
"learning_rate": 1.977269255793915e-05, |
|
"loss": 0.3947, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2632563942607611, |
|
"grad_norm": 0.08995859324932098, |
|
"learning_rate": 1.9769853318878177e-05, |
|
"loss": 0.4331, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.264504054897068, |
|
"grad_norm": 0.07644990086555481, |
|
"learning_rate": 1.976699666399951e-05, |
|
"loss": 0.3967, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.2657517155333749, |
|
"grad_norm": 0.08435816317796707, |
|
"learning_rate": 1.9764122598395444e-05, |
|
"loss": 0.3821, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.26699937616968183, |
|
"grad_norm": 0.10862424224615097, |
|
"learning_rate": 1.976123112718931e-05, |
|
"loss": 0.3533, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.26824703680598877, |
|
"grad_norm": 0.5484840869903564, |
|
"learning_rate": 1.9758322255535468e-05, |
|
"loss": 0.4116, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.2694946974422957, |
|
"grad_norm": 0.09223023056983948, |
|
"learning_rate": 1.9755395988619295e-05, |
|
"loss": 0.3947, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.27074235807860264, |
|
"grad_norm": 0.12186647951602936, |
|
"learning_rate": 1.975245233165718e-05, |
|
"loss": 0.3597, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.2719900187149095, |
|
"grad_norm": 0.08124908804893494, |
|
"learning_rate": 1.974949128989651e-05, |
|
"loss": 0.4222, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.27323767935121646, |
|
"grad_norm": 0.05763809010386467, |
|
"learning_rate": 1.9746512868615656e-05, |
|
"loss": 0.3295, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.2744853399875234, |
|
"grad_norm": 0.07339370250701904, |
|
"learning_rate": 1.974351707312398e-05, |
|
"loss": 0.4178, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2757330006238303, |
|
"grad_norm": 0.10172763466835022, |
|
"learning_rate": 1.974050390876181e-05, |
|
"loss": 0.4779, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.27698066126013726, |
|
"grad_norm": 0.06345459818840027, |
|
"learning_rate": 1.973747338090043e-05, |
|
"loss": 0.3174, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.27822832189644414, |
|
"grad_norm": 0.10512516647577286, |
|
"learning_rate": 1.9734425494942097e-05, |
|
"loss": 0.3712, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.2794759825327511, |
|
"grad_norm": 0.07345092296600342, |
|
"learning_rate": 1.9731360256319982e-05, |
|
"loss": 0.3379, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.280723643169058, |
|
"grad_norm": 0.08379875868558884, |
|
"learning_rate": 1.9728277670498212e-05, |
|
"loss": 0.4001, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.28197130380536495, |
|
"grad_norm": 0.0922863557934761, |
|
"learning_rate": 1.972517774297183e-05, |
|
"loss": 0.4129, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.2832189644416719, |
|
"grad_norm": 0.07143756747245789, |
|
"learning_rate": 1.9722060479266782e-05, |
|
"loss": 0.4302, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.28446662507797876, |
|
"grad_norm": 0.07472452521324158, |
|
"learning_rate": 1.9718925884939935e-05, |
|
"loss": 0.3454, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 0.12237786501646042, |
|
"learning_rate": 1.9715773965579038e-05, |
|
"loss": 0.4297, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.28696194635059263, |
|
"grad_norm": 0.10508041828870773, |
|
"learning_rate": 1.9712604726802732e-05, |
|
"loss": 0.4559, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.28820960698689957, |
|
"grad_norm": 0.07032033056020737, |
|
"learning_rate": 1.9709418174260523e-05, |
|
"loss": 0.3811, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.2894572676232065, |
|
"grad_norm": 0.1084074005484581, |
|
"learning_rate": 1.9706214313632786e-05, |
|
"loss": 0.3989, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.29070492825951344, |
|
"grad_norm": 0.09418239444494247, |
|
"learning_rate": 1.9702993150630744e-05, |
|
"loss": 0.4491, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.2919525888958203, |
|
"grad_norm": 0.07199984788894653, |
|
"learning_rate": 1.9699754690996476e-05, |
|
"loss": 0.408, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.29320024953212726, |
|
"grad_norm": 0.10438424348831177, |
|
"learning_rate": 1.969649894050288e-05, |
|
"loss": 0.4234, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2944479101684342, |
|
"grad_norm": 0.07043947279453278, |
|
"learning_rate": 1.9693225904953682e-05, |
|
"loss": 0.3366, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.2956955708047411, |
|
"grad_norm": 0.08545084297657013, |
|
"learning_rate": 1.968993559018342e-05, |
|
"loss": 0.506, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.29694323144104806, |
|
"grad_norm": 0.0881681963801384, |
|
"learning_rate": 1.9686628002057445e-05, |
|
"loss": 0.4111, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.29819089207735494, |
|
"grad_norm": 0.08068764954805374, |
|
"learning_rate": 1.9683303146471878e-05, |
|
"loss": 0.3981, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.2994385527136619, |
|
"grad_norm": 0.17502640187740326, |
|
"learning_rate": 1.9679961029353637e-05, |
|
"loss": 0.4002, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3006862133499688, |
|
"grad_norm": 0.09356331080198288, |
|
"learning_rate": 1.967660165666041e-05, |
|
"loss": 0.4083, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.30193387398627575, |
|
"grad_norm": 0.0942590981721878, |
|
"learning_rate": 1.9673225034380635e-05, |
|
"loss": 0.3643, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.3031815346225827, |
|
"grad_norm": 0.09707184135913849, |
|
"learning_rate": 1.9669831168533513e-05, |
|
"loss": 0.4216, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.30442919525888956, |
|
"grad_norm": 0.1203550398349762, |
|
"learning_rate": 1.9666420065168967e-05, |
|
"loss": 0.3555, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.3056768558951965, |
|
"grad_norm": 0.09338900446891785, |
|
"learning_rate": 1.9662991730367664e-05, |
|
"loss": 0.3969, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.30692451653150343, |
|
"grad_norm": 0.08367209136486053, |
|
"learning_rate": 1.9659546170240977e-05, |
|
"loss": 0.4305, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.30817217716781037, |
|
"grad_norm": 0.08842061460018158, |
|
"learning_rate": 1.965608339093099e-05, |
|
"loss": 0.3676, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.3094198378041173, |
|
"grad_norm": 0.07321275770664215, |
|
"learning_rate": 1.965260339861048e-05, |
|
"loss": 0.4718, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3094198378041173, |
|
"eval_loss": 0.37761175632476807, |
|
"eval_runtime": 2823.1422, |
|
"eval_samples_per_second": 0.253, |
|
"eval_steps_per_second": 0.126, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3106674984404242, |
|
"grad_norm": 0.12405838817358017, |
|
"learning_rate": 1.964910619948291e-05, |
|
"loss": 0.4069, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3119151590767311, |
|
"grad_norm": 0.08042417466640472, |
|
"learning_rate": 1.9645591799782416e-05, |
|
"loss": 0.4346, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.31316281971303805, |
|
"grad_norm": 0.07625039666891098, |
|
"learning_rate": 1.9642060205773793e-05, |
|
"loss": 0.4323, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.314410480349345, |
|
"grad_norm": 0.07860994338989258, |
|
"learning_rate": 1.9638511423752486e-05, |
|
"loss": 0.3794, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.3156581409856519, |
|
"grad_norm": 0.10293662548065186, |
|
"learning_rate": 1.9634945460044587e-05, |
|
"loss": 0.3621, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.3169058016219588, |
|
"grad_norm": 0.07868197560310364, |
|
"learning_rate": 1.9631362321006807e-05, |
|
"loss": 0.4549, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.31815346225826574, |
|
"grad_norm": 0.08555343747138977, |
|
"learning_rate": 1.9627762013026474e-05, |
|
"loss": 0.3852, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.3194011228945727, |
|
"grad_norm": 0.08398033678531647, |
|
"learning_rate": 1.9624144542521538e-05, |
|
"loss": 0.3722, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.3206487835308796, |
|
"grad_norm": 0.07331738620996475, |
|
"learning_rate": 1.962050991594052e-05, |
|
"loss": 0.3561, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.32189644416718655, |
|
"grad_norm": 0.07319743931293488, |
|
"learning_rate": 1.9616858139762534e-05, |
|
"loss": 0.3152, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.3231441048034934, |
|
"grad_norm": 0.11726346611976624, |
|
"learning_rate": 1.9613189220497273e-05, |
|
"loss": 0.4616, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.32439176543980036, |
|
"grad_norm": 0.10728958249092102, |
|
"learning_rate": 1.9609503164684974e-05, |
|
"loss": 0.3708, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.3256394260761073, |
|
"grad_norm": 0.08347611129283905, |
|
"learning_rate": 1.9605799978896433e-05, |
|
"loss": 0.406, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.32688708671241423, |
|
"grad_norm": 0.08457101881504059, |
|
"learning_rate": 1.9602079669732975e-05, |
|
"loss": 0.3541, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.32813474734872117, |
|
"grad_norm": 0.10014831274747849, |
|
"learning_rate": 1.9598342243826457e-05, |
|
"loss": 0.4757, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.32938240798502805, |
|
"grad_norm": 0.09075610339641571, |
|
"learning_rate": 1.9594587707839242e-05, |
|
"loss": 0.4567, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.330630068621335, |
|
"grad_norm": 0.06062496826052666, |
|
"learning_rate": 1.9590816068464192e-05, |
|
"loss": 0.2811, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.3318777292576419, |
|
"grad_norm": 0.10323761403560638, |
|
"learning_rate": 1.9587027332424668e-05, |
|
"loss": 0.4135, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.33312538989394885, |
|
"grad_norm": 0.0712403953075409, |
|
"learning_rate": 1.9583221506474494e-05, |
|
"loss": 0.3841, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.3343730505302558, |
|
"grad_norm": 0.12237131595611572, |
|
"learning_rate": 1.957939859739797e-05, |
|
"loss": 0.3045, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.33562071116656267, |
|
"grad_norm": 0.09809068590402603, |
|
"learning_rate": 1.957555861200984e-05, |
|
"loss": 0.3938, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.3368683718028696, |
|
"grad_norm": 0.15238316357135773, |
|
"learning_rate": 1.9571701557155295e-05, |
|
"loss": 0.3669, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.33811603243917654, |
|
"grad_norm": 0.08934526145458221, |
|
"learning_rate": 1.9567827439709954e-05, |
|
"loss": 0.3647, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.3393636930754835, |
|
"grad_norm": 0.0997021347284317, |
|
"learning_rate": 1.9563936266579845e-05, |
|
"loss": 0.3665, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.3406113537117904, |
|
"grad_norm": 0.09582076221704483, |
|
"learning_rate": 1.9560028044701404e-05, |
|
"loss": 0.3686, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.3418590143480973, |
|
"grad_norm": 0.1995895355939865, |
|
"learning_rate": 1.955610278104146e-05, |
|
"loss": 0.3884, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.3431066749844042, |
|
"grad_norm": 0.13371475040912628, |
|
"learning_rate": 1.955216048259722e-05, |
|
"loss": 0.4411, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.34435433562071116, |
|
"grad_norm": 0.0868840217590332, |
|
"learning_rate": 1.9548201156396255e-05, |
|
"loss": 0.4014, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.3456019962570181, |
|
"grad_norm": 0.06779041141271591, |
|
"learning_rate": 1.954422480949649e-05, |
|
"loss": 0.2838, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.34684965689332503, |
|
"grad_norm": 0.08861416578292847, |
|
"learning_rate": 1.9540231448986193e-05, |
|
"loss": 0.3724, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.34809731752963197, |
|
"grad_norm": 0.12394684553146362, |
|
"learning_rate": 1.953622108198396e-05, |
|
"loss": 0.388, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.34934497816593885, |
|
"grad_norm": 0.06695356220006943, |
|
"learning_rate": 1.9532193715638703e-05, |
|
"loss": 0.2598, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3505926388022458, |
|
"grad_norm": 0.07081321626901627, |
|
"learning_rate": 1.9528149357129637e-05, |
|
"loss": 0.3504, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.3518402994385527, |
|
"grad_norm": 0.07354036718606949, |
|
"learning_rate": 1.9524088013666272e-05, |
|
"loss": 0.3052, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.35308796007485965, |
|
"grad_norm": 0.06471217423677444, |
|
"learning_rate": 1.9520009692488382e-05, |
|
"loss": 0.3449, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.3543356207111666, |
|
"grad_norm": 0.07577944546937943, |
|
"learning_rate": 1.9515914400866022e-05, |
|
"loss": 0.3782, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.35558328134747347, |
|
"grad_norm": 0.06845004856586456, |
|
"learning_rate": 1.9511802146099486e-05, |
|
"loss": 0.282, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.3568309419837804, |
|
"grad_norm": 0.09513509273529053, |
|
"learning_rate": 1.9507672935519318e-05, |
|
"loss": 0.3563, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.35807860262008734, |
|
"grad_norm": 0.06815797835588455, |
|
"learning_rate": 1.9503526776486275e-05, |
|
"loss": 0.298, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.3593262632563943, |
|
"grad_norm": 0.0746559351682663, |
|
"learning_rate": 1.9499363676391337e-05, |
|
"loss": 0.3185, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.3605739238927012, |
|
"grad_norm": 0.10398710519075394, |
|
"learning_rate": 1.9495183642655674e-05, |
|
"loss": 0.4161, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.3618215845290081, |
|
"grad_norm": 0.09173806756734848, |
|
"learning_rate": 1.949098668273065e-05, |
|
"loss": 0.4049, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.363069245165315, |
|
"grad_norm": 0.08424055576324463, |
|
"learning_rate": 1.9486772804097807e-05, |
|
"loss": 0.2972, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.36431690580162196, |
|
"grad_norm": 0.09673412144184113, |
|
"learning_rate": 1.948254201426883e-05, |
|
"loss": 0.2899, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.3655645664379289, |
|
"grad_norm": 0.07556866854429245, |
|
"learning_rate": 1.9478294320785553e-05, |
|
"loss": 0.4131, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.36681222707423583, |
|
"grad_norm": 0.13352152705192566, |
|
"learning_rate": 1.9474029731219958e-05, |
|
"loss": 0.3592, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.3680598877105427, |
|
"grad_norm": 0.08789486438035965, |
|
"learning_rate": 1.9469748253174132e-05, |
|
"loss": 0.4613, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.36930754834684965, |
|
"grad_norm": 0.2030758261680603, |
|
"learning_rate": 1.946544989428027e-05, |
|
"loss": 0.3532, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.3705552089831566, |
|
"grad_norm": 0.058304548263549805, |
|
"learning_rate": 1.9461134662200667e-05, |
|
"loss": 0.2787, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.3718028696194635, |
|
"grad_norm": 2.858107089996338, |
|
"learning_rate": 1.9456802564627682e-05, |
|
"loss": 0.4308, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.37305053025577045, |
|
"grad_norm": 0.07007814943790436, |
|
"learning_rate": 1.9452453609283754e-05, |
|
"loss": 0.3738, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.37429819089207733, |
|
"grad_norm": 0.07296311110258102, |
|
"learning_rate": 1.9448087803921358e-05, |
|
"loss": 0.3494, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.37554585152838427, |
|
"grad_norm": 0.0859873816370964, |
|
"learning_rate": 1.9443705156323016e-05, |
|
"loss": 0.3019, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.3767935121646912, |
|
"grad_norm": 0.06829937547445297, |
|
"learning_rate": 1.9439305674301265e-05, |
|
"loss": 0.3248, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.37804117280099814, |
|
"grad_norm": 0.13362844288349152, |
|
"learning_rate": 1.9434889365698668e-05, |
|
"loss": 0.3045, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.3792888334373051, |
|
"grad_norm": 0.07973142713308334, |
|
"learning_rate": 1.9430456238387763e-05, |
|
"loss": 0.3811, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.38053649407361195, |
|
"grad_norm": 0.08348105847835541, |
|
"learning_rate": 1.9426006300271082e-05, |
|
"loss": 0.2735, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.3817841547099189, |
|
"grad_norm": 0.0983782708644867, |
|
"learning_rate": 1.9421539559281114e-05, |
|
"loss": 0.4023, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.3830318153462258, |
|
"grad_norm": 0.10556434094905853, |
|
"learning_rate": 1.9417056023380312e-05, |
|
"loss": 0.3741, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.38427947598253276, |
|
"grad_norm": 0.10459857434034348, |
|
"learning_rate": 1.941255570056106e-05, |
|
"loss": 0.4195, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.3855271366188397, |
|
"grad_norm": 0.09314591437578201, |
|
"learning_rate": 1.9408038598845672e-05, |
|
"loss": 0.4166, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.3867747972551466, |
|
"grad_norm": 0.09022191166877747, |
|
"learning_rate": 1.9403504726286367e-05, |
|
"loss": 0.3708, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.3880224578914535, |
|
"grad_norm": 0.07517941296100616, |
|
"learning_rate": 1.939895409096526e-05, |
|
"loss": 0.4089, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.38927011852776044, |
|
"grad_norm": 0.08791400492191315, |
|
"learning_rate": 1.9394386700994357e-05, |
|
"loss": 0.3952, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.3905177791640674, |
|
"grad_norm": 0.08254127204418182, |
|
"learning_rate": 1.9389802564515513e-05, |
|
"loss": 0.4023, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.3917654398003743, |
|
"grad_norm": 0.09163601696491241, |
|
"learning_rate": 1.938520168970045e-05, |
|
"loss": 0.4002, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.3930131004366812, |
|
"grad_norm": 0.10828031599521637, |
|
"learning_rate": 1.938058408475073e-05, |
|
"loss": 0.3885, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.39426076107298813, |
|
"grad_norm": 0.10482138395309448, |
|
"learning_rate": 1.9375949757897725e-05, |
|
"loss": 0.3525, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.39550842170929507, |
|
"grad_norm": 0.07083115726709366, |
|
"learning_rate": 1.9371298717402625e-05, |
|
"loss": 0.3372, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.396756082345602, |
|
"grad_norm": 0.08137016743421555, |
|
"learning_rate": 1.9366630971556407e-05, |
|
"loss": 0.4064, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.39800374298190894, |
|
"grad_norm": 0.07335782796144485, |
|
"learning_rate": 1.9361946528679837e-05, |
|
"loss": 0.3163, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.39925140361821587, |
|
"grad_norm": 0.08302006125450134, |
|
"learning_rate": 1.9357245397123443e-05, |
|
"loss": 0.3502, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.40049906425452275, |
|
"grad_norm": 0.07668264955282211, |
|
"learning_rate": 1.9352527585267493e-05, |
|
"loss": 0.3226, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.4017467248908297, |
|
"grad_norm": 0.10439937561750412, |
|
"learning_rate": 1.9347793101521993e-05, |
|
"loss": 0.3212, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.4029943855271366, |
|
"grad_norm": 0.10303515940904617, |
|
"learning_rate": 1.934304195432668e-05, |
|
"loss": 0.4068, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.40424204616344356, |
|
"grad_norm": 0.09272205829620361, |
|
"learning_rate": 1.9338274152150983e-05, |
|
"loss": 0.3365, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.4054897067997505, |
|
"grad_norm": 0.06783345341682434, |
|
"learning_rate": 1.933348970349402e-05, |
|
"loss": 0.3289, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.4067373674360574, |
|
"grad_norm": 0.07962651550769806, |
|
"learning_rate": 1.932868861688459e-05, |
|
"loss": 0.384, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.4079850280723643, |
|
"grad_norm": 0.08090052008628845, |
|
"learning_rate": 1.932387090088115e-05, |
|
"loss": 0.3748, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.40923268870867124, |
|
"grad_norm": 0.06662657111883163, |
|
"learning_rate": 1.9319036564071797e-05, |
|
"loss": 0.3277, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.4104803493449782, |
|
"grad_norm": 0.08814183622598648, |
|
"learning_rate": 1.9314185615074253e-05, |
|
"loss": 0.4001, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.4117280099812851, |
|
"grad_norm": 0.06779827177524567, |
|
"learning_rate": 1.9309318062535864e-05, |
|
"loss": 0.2929, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.412975670617592, |
|
"grad_norm": 0.08183370530605316, |
|
"learning_rate": 1.9304433915133567e-05, |
|
"loss": 0.3979, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.41422333125389893, |
|
"grad_norm": 0.0669468492269516, |
|
"learning_rate": 1.9299533181573883e-05, |
|
"loss": 0.3279, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.41547099189020587, |
|
"grad_norm": 0.08377107977867126, |
|
"learning_rate": 1.9294615870592892e-05, |
|
"loss": 0.3775, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.4167186525265128, |
|
"grad_norm": 0.08826244622468948, |
|
"learning_rate": 1.928968199095624e-05, |
|
"loss": 0.4081, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.41796631316281974, |
|
"grad_norm": 0.07477085292339325, |
|
"learning_rate": 1.928473155145909e-05, |
|
"loss": 0.3794, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.4192139737991266, |
|
"grad_norm": 0.08051132410764694, |
|
"learning_rate": 1.9279764560926142e-05, |
|
"loss": 0.4369, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.42046163443543355, |
|
"grad_norm": 0.08119165897369385, |
|
"learning_rate": 1.927478102821159e-05, |
|
"loss": 0.4217, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4217092950717405, |
|
"grad_norm": 0.07305929064750671, |
|
"learning_rate": 1.926978096219912e-05, |
|
"loss": 0.3603, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.4229569557080474, |
|
"grad_norm": 0.08188523352146149, |
|
"learning_rate": 1.926476437180189e-05, |
|
"loss": 0.3972, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.42420461634435436, |
|
"grad_norm": 0.08422026038169861, |
|
"learning_rate": 1.925973126596251e-05, |
|
"loss": 0.3614, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.42545227698066124, |
|
"grad_norm": 0.07257169485092163, |
|
"learning_rate": 1.9254681653653037e-05, |
|
"loss": 0.3365, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.4266999376169682, |
|
"grad_norm": 0.0878138542175293, |
|
"learning_rate": 1.9249615543874947e-05, |
|
"loss": 0.4388, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.4279475982532751, |
|
"grad_norm": 0.08215674012899399, |
|
"learning_rate": 1.9244532945659132e-05, |
|
"loss": 0.4097, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.42919525888958204, |
|
"grad_norm": 0.07723066210746765, |
|
"learning_rate": 1.9239433868065867e-05, |
|
"loss": 0.3869, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.430442919525889, |
|
"grad_norm": 0.10846715420484543, |
|
"learning_rate": 1.923431832018481e-05, |
|
"loss": 0.3279, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.43169058016219586, |
|
"grad_norm": 0.08006098121404648, |
|
"learning_rate": 1.922918631113498e-05, |
|
"loss": 0.3437, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.4329382407985028, |
|
"grad_norm": 0.07850154489278793, |
|
"learning_rate": 1.922403785006473e-05, |
|
"loss": 0.32, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.43418590143480973, |
|
"grad_norm": 0.07446196675300598, |
|
"learning_rate": 1.9218872946151753e-05, |
|
"loss": 0.3478, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.43543356207111666, |
|
"grad_norm": 0.07685361057519913, |
|
"learning_rate": 1.9213691608603046e-05, |
|
"loss": 0.397, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.4366812227074236, |
|
"grad_norm": 0.08373807370662689, |
|
"learning_rate": 1.9208493846654905e-05, |
|
"loss": 0.3756, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4379288833437305, |
|
"grad_norm": 0.08705881237983704, |
|
"learning_rate": 1.9203279669572893e-05, |
|
"loss": 0.3636, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.4391765439800374, |
|
"grad_norm": 0.07912624627351761, |
|
"learning_rate": 1.9198049086651852e-05, |
|
"loss": 0.3214, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.44042420461634435, |
|
"grad_norm": 0.07962213456630707, |
|
"learning_rate": 1.9192802107215857e-05, |
|
"loss": 0.3488, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.4416718652526513, |
|
"grad_norm": 0.06611789017915726, |
|
"learning_rate": 1.9187538740618215e-05, |
|
"loss": 0.3276, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4429195258889582, |
|
"grad_norm": 0.0737161785364151, |
|
"learning_rate": 1.9182258996241446e-05, |
|
"loss": 0.344, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.4441671865252651, |
|
"grad_norm": 0.0750458687543869, |
|
"learning_rate": 1.917696288349726e-05, |
|
"loss": 0.3952, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.44541484716157204, |
|
"grad_norm": 0.09110225737094879, |
|
"learning_rate": 1.9171650411826556e-05, |
|
"loss": 0.3767, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.44666250779787897, |
|
"grad_norm": 0.06446138769388199, |
|
"learning_rate": 1.9166321590699385e-05, |
|
"loss": 0.3164, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.4479101684341859, |
|
"grad_norm": 0.07691174745559692, |
|
"learning_rate": 1.916097642961494e-05, |
|
"loss": 0.4026, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.44915782907049284, |
|
"grad_norm": 0.07001764327287674, |
|
"learning_rate": 1.9155614938101557e-05, |
|
"loss": 0.3439, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4504054897067998, |
|
"grad_norm": 0.09792986512184143, |
|
"learning_rate": 1.915023712571667e-05, |
|
"loss": 0.3888, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.45165315034310666, |
|
"grad_norm": 0.07331310957670212, |
|
"learning_rate": 1.9144843002046805e-05, |
|
"loss": 0.3222, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.4529008109794136, |
|
"grad_norm": 0.06881300359964371, |
|
"learning_rate": 1.9139432576707575e-05, |
|
"loss": 0.324, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.45414847161572053, |
|
"grad_norm": 0.07379250973463058, |
|
"learning_rate": 1.9134005859343647e-05, |
|
"loss": 0.3464, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.45539613225202746, |
|
"grad_norm": 0.09389679878950119, |
|
"learning_rate": 1.9128562859628725e-05, |
|
"loss": 0.3314, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.4566437928883344, |
|
"grad_norm": 0.10510922968387604, |
|
"learning_rate": 1.9123103587265545e-05, |
|
"loss": 0.3702, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.4578914535246413, |
|
"grad_norm": 0.09789140522480011, |
|
"learning_rate": 1.911762805198585e-05, |
|
"loss": 0.3061, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.4591391141609482, |
|
"grad_norm": 0.07343242317438126, |
|
"learning_rate": 1.9112136263550376e-05, |
|
"loss": 0.3674, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.46038677479725515, |
|
"grad_norm": 0.07687333226203918, |
|
"learning_rate": 1.9106628231748825e-05, |
|
"loss": 0.3227, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.4616344354335621, |
|
"grad_norm": 0.09030032902956009, |
|
"learning_rate": 1.9101103966399856e-05, |
|
"loss": 0.3499, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.462882096069869, |
|
"grad_norm": 0.09658922255039215, |
|
"learning_rate": 1.9095563477351075e-05, |
|
"loss": 0.3838, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.4641297567061759, |
|
"grad_norm": 0.42790326476097107, |
|
"learning_rate": 1.909000677447899e-05, |
|
"loss": 0.3827, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.46537741734248284, |
|
"grad_norm": 0.07787511497735977, |
|
"learning_rate": 1.9084433867689035e-05, |
|
"loss": 0.3981, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.46662507797878977, |
|
"grad_norm": 0.07825618982315063, |
|
"learning_rate": 1.9078844766915514e-05, |
|
"loss": 0.3584, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.4678727386150967, |
|
"grad_norm": 0.09160970151424408, |
|
"learning_rate": 1.9073239482121598e-05, |
|
"loss": 0.3451, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.46912039925140364, |
|
"grad_norm": 0.06959247589111328, |
|
"learning_rate": 1.9067618023299318e-05, |
|
"loss": 0.3168, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.4703680598877105, |
|
"grad_norm": 0.09179724752902985, |
|
"learning_rate": 1.9061980400469524e-05, |
|
"loss": 0.3784, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.47161572052401746, |
|
"grad_norm": 0.10104795545339584, |
|
"learning_rate": 1.9056326623681896e-05, |
|
"loss": 0.3234, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4728633811603244, |
|
"grad_norm": 0.07413583993911743, |
|
"learning_rate": 1.9050656703014893e-05, |
|
"loss": 0.3372, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.4741110417966313, |
|
"grad_norm": 0.07793621718883514, |
|
"learning_rate": 1.9044970648575763e-05, |
|
"loss": 0.3489, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.47535870243293826, |
|
"grad_norm": 0.08598163723945618, |
|
"learning_rate": 1.9039268470500515e-05, |
|
"loss": 0.4078, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.47660636306924514, |
|
"grad_norm": 0.0972554013133049, |
|
"learning_rate": 1.903355017895389e-05, |
|
"loss": 0.3983, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.4778540237055521, |
|
"grad_norm": 0.0939214900135994, |
|
"learning_rate": 1.9027815784129365e-05, |
|
"loss": 0.3407, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.479101684341859, |
|
"grad_norm": 0.16490474343299866, |
|
"learning_rate": 1.9022065296249116e-05, |
|
"loss": 0.3577, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.48034934497816595, |
|
"grad_norm": 0.07326449453830719, |
|
"learning_rate": 1.9016298725564012e-05, |
|
"loss": 0.3071, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.4815970056144729, |
|
"grad_norm": 0.07553815096616745, |
|
"learning_rate": 1.901051608235358e-05, |
|
"loss": 0.3218, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.48284466625077976, |
|
"grad_norm": 0.08704483509063721, |
|
"learning_rate": 1.9004717376926007e-05, |
|
"loss": 0.3309, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.4840923268870867, |
|
"grad_norm": 0.07856780290603638, |
|
"learning_rate": 1.8998902619618117e-05, |
|
"loss": 0.3173, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.48533998752339363, |
|
"grad_norm": 0.19682346284389496, |
|
"learning_rate": 1.8993071820795336e-05, |
|
"loss": 0.3777, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.48658764815970057, |
|
"grad_norm": 0.21116366982460022, |
|
"learning_rate": 1.8987224990851697e-05, |
|
"loss": 0.3842, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4878353087960075, |
|
"grad_norm": 0.09021289646625519, |
|
"learning_rate": 1.8981362140209798e-05, |
|
"loss": 0.3869, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.4890829694323144, |
|
"grad_norm": 0.10774579644203186, |
|
"learning_rate": 1.897548327932081e-05, |
|
"loss": 0.3502, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.4903306300686213, |
|
"grad_norm": 0.09023426473140717, |
|
"learning_rate": 1.8969588418664434e-05, |
|
"loss": 0.4253, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.49157829070492826, |
|
"grad_norm": 0.07410671561956406, |
|
"learning_rate": 1.89636775687489e-05, |
|
"loss": 0.3141, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.4928259513412352, |
|
"grad_norm": 0.07652512937784195, |
|
"learning_rate": 1.8957750740110924e-05, |
|
"loss": 0.3277, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.4940736119775421, |
|
"grad_norm": 0.0774303525686264, |
|
"learning_rate": 1.895180794331573e-05, |
|
"loss": 0.3339, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.495321272613849, |
|
"grad_norm": 0.08927808701992035, |
|
"learning_rate": 1.8945849188956986e-05, |
|
"loss": 0.362, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.49656893325015594, |
|
"grad_norm": 0.08084219694137573, |
|
"learning_rate": 1.893987448765682e-05, |
|
"loss": 0.3304, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.4978165938864629, |
|
"grad_norm": 0.08156754076480865, |
|
"learning_rate": 1.8933883850065778e-05, |
|
"loss": 0.3759, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.4990642545227698, |
|
"grad_norm": 0.14657171070575714, |
|
"learning_rate": 1.892787728686282e-05, |
|
"loss": 0.3648, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5003119151590767, |
|
"grad_norm": 0.07986622303724289, |
|
"learning_rate": 1.8921854808755295e-05, |
|
"loss": 0.3212, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.5015595757953837, |
|
"grad_norm": 0.08847914636135101, |
|
"learning_rate": 1.8915816426478914e-05, |
|
"loss": 0.4114, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.5028072364316906, |
|
"grad_norm": 0.08348351716995239, |
|
"learning_rate": 1.8909762150797752e-05, |
|
"loss": 0.4262, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.5040548970679976, |
|
"grad_norm": 0.07425021380186081, |
|
"learning_rate": 1.8903691992504204e-05, |
|
"loss": 0.3396, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.5053025577043044, |
|
"grad_norm": 0.09293273836374283, |
|
"learning_rate": 1.889760596241898e-05, |
|
"loss": 0.4121, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.5065502183406113, |
|
"grad_norm": 0.08483749628067017, |
|
"learning_rate": 1.8891504071391092e-05, |
|
"loss": 0.3955, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.5077978789769183, |
|
"grad_norm": 0.09369031339883804, |
|
"learning_rate": 1.8885386330297817e-05, |
|
"loss": 0.3644, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.5090455396132252, |
|
"grad_norm": 0.06889427453279495, |
|
"learning_rate": 1.8879252750044686e-05, |
|
"loss": 0.2993, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.5102932002495322, |
|
"grad_norm": 0.07942546904087067, |
|
"learning_rate": 1.887310334156547e-05, |
|
"loss": 0.3975, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.511540860885839, |
|
"grad_norm": 0.0964096263051033, |
|
"learning_rate": 1.8866938115822148e-05, |
|
"loss": 0.4178, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5127885215221459, |
|
"grad_norm": 0.19631971418857574, |
|
"learning_rate": 1.8860757083804908e-05, |
|
"loss": 0.2797, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.5140361821584529, |
|
"grad_norm": 0.08998764306306839, |
|
"learning_rate": 1.8854560256532098e-05, |
|
"loss": 0.3698, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.5152838427947598, |
|
"grad_norm": 0.07443209737539291, |
|
"learning_rate": 1.8848347645050242e-05, |
|
"loss": 0.2805, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.5165315034310668, |
|
"grad_norm": 0.11266658455133438, |
|
"learning_rate": 1.8842119260433984e-05, |
|
"loss": 0.4253, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.5177791640673737, |
|
"grad_norm": 0.07977344840765, |
|
"learning_rate": 1.8835875113786088e-05, |
|
"loss": 0.4107, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5190268247036806, |
|
"grad_norm": 0.07444226741790771, |
|
"learning_rate": 1.8829615216237426e-05, |
|
"loss": 0.2756, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.5202744853399875, |
|
"grad_norm": 0.08746916055679321, |
|
"learning_rate": 1.8823339578946935e-05, |
|
"loss": 0.3661, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5215221459762944, |
|
"grad_norm": 0.07776310294866562, |
|
"learning_rate": 1.881704821310162e-05, |
|
"loss": 0.356, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5227698066126014, |
|
"grad_norm": 0.07319823652505875, |
|
"learning_rate": 1.8810741129916516e-05, |
|
"loss": 0.2972, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5240174672489083, |
|
"grad_norm": 0.1166319027543068, |
|
"learning_rate": 1.880441834063468e-05, |
|
"loss": 0.3385, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5252651278852152, |
|
"grad_norm": 0.07309556007385254, |
|
"learning_rate": 1.8798079856527175e-05, |
|
"loss": 0.3663, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5265127885215222, |
|
"grad_norm": 0.08942176401615143, |
|
"learning_rate": 1.8791725688893018e-05, |
|
"loss": 0.366, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.527760449157829, |
|
"grad_norm": 0.09301096200942993, |
|
"learning_rate": 1.8785355849059204e-05, |
|
"loss": 0.3379, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.529008109794136, |
|
"grad_norm": 0.11961039900779724, |
|
"learning_rate": 1.877897034838067e-05, |
|
"loss": 0.3297, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5302557704304429, |
|
"grad_norm": 0.0748453140258789, |
|
"learning_rate": 1.8772569198240244e-05, |
|
"loss": 0.3437, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5315034310667498, |
|
"grad_norm": 0.120552197098732, |
|
"learning_rate": 1.8766152410048676e-05, |
|
"loss": 0.3903, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.5327510917030568, |
|
"grad_norm": 0.09532199800014496, |
|
"learning_rate": 1.875971999524458e-05, |
|
"loss": 0.2942, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.5339987523393637, |
|
"grad_norm": 0.07045169919729233, |
|
"learning_rate": 1.8753271965294437e-05, |
|
"loss": 0.2739, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.5352464129756707, |
|
"grad_norm": 0.0874415785074234, |
|
"learning_rate": 1.8746808331692542e-05, |
|
"loss": 0.4072, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.5364940736119775, |
|
"grad_norm": 0.09460163861513138, |
|
"learning_rate": 1.874032910596103e-05, |
|
"loss": 0.3936, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5377417342482844, |
|
"grad_norm": 0.1052878126502037, |
|
"learning_rate": 1.8733834299649818e-05, |
|
"loss": 0.3533, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.5389893948845914, |
|
"grad_norm": 0.09336161613464355, |
|
"learning_rate": 1.87273239243366e-05, |
|
"loss": 0.3473, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.5402370555208983, |
|
"grad_norm": 0.08659809827804565, |
|
"learning_rate": 1.872079799162682e-05, |
|
"loss": 0.4026, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.5414847161572053, |
|
"grad_norm": 0.0875949040055275, |
|
"learning_rate": 1.871425651315365e-05, |
|
"loss": 0.4091, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.5427323767935122, |
|
"grad_norm": 0.08493121713399887, |
|
"learning_rate": 1.8707699500577997e-05, |
|
"loss": 0.3439, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.543980037429819, |
|
"grad_norm": 0.12357327342033386, |
|
"learning_rate": 1.8701126965588433e-05, |
|
"loss": 0.4433, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.545227698066126, |
|
"grad_norm": 0.0841773971915245, |
|
"learning_rate": 1.8694538919901216e-05, |
|
"loss": 0.3916, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.5464753587024329, |
|
"grad_norm": 0.08911257982254028, |
|
"learning_rate": 1.8687935375260244e-05, |
|
"loss": 0.3349, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.5477230193387399, |
|
"grad_norm": 0.1051001101732254, |
|
"learning_rate": 1.8681316343437048e-05, |
|
"loss": 0.3307, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.5489706799750468, |
|
"grad_norm": 0.08096891641616821, |
|
"learning_rate": 1.867468183623077e-05, |
|
"loss": 0.3684, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.5502183406113537, |
|
"grad_norm": 0.06814990937709808, |
|
"learning_rate": 1.866803186546813e-05, |
|
"loss": 0.2707, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5514660012476607, |
|
"grad_norm": 0.08140693604946136, |
|
"learning_rate": 1.8661366443003426e-05, |
|
"loss": 0.3693, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.5527136618839675, |
|
"grad_norm": 0.08824186027050018, |
|
"learning_rate": 1.8654685580718482e-05, |
|
"loss": 0.3401, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.5539613225202745, |
|
"grad_norm": 0.07940424978733063, |
|
"learning_rate": 1.8647989290522667e-05, |
|
"loss": 0.3297, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5552089831565814, |
|
"grad_norm": 0.08890607953071594, |
|
"learning_rate": 1.8641277584352832e-05, |
|
"loss": 0.3961, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5564566437928883, |
|
"grad_norm": 0.09835802763700485, |
|
"learning_rate": 1.8634550474173322e-05, |
|
"loss": 0.3718, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5577043044291953, |
|
"grad_norm": 0.07689094543457031, |
|
"learning_rate": 1.862780797197594e-05, |
|
"loss": 0.3069, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.5589519650655022, |
|
"grad_norm": 0.07634837925434113, |
|
"learning_rate": 1.8621050089779912e-05, |
|
"loss": 0.3772, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5601996257018091, |
|
"grad_norm": 0.08531224727630615, |
|
"learning_rate": 1.8614276839631902e-05, |
|
"loss": 0.3344, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.561447286338116, |
|
"grad_norm": 0.08031272888183594, |
|
"learning_rate": 1.860748823360596e-05, |
|
"loss": 0.419, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.5626949469744229, |
|
"grad_norm": 0.09414199739694595, |
|
"learning_rate": 1.86006842838035e-05, |
|
"loss": 0.4198, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.5639426076107299, |
|
"grad_norm": 0.08871858566999435, |
|
"learning_rate": 1.8593865002353307e-05, |
|
"loss": 0.348, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.5651902682470368, |
|
"grad_norm": 0.07083774358034134, |
|
"learning_rate": 1.858703040141148e-05, |
|
"loss": 0.3334, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.5664379288833438, |
|
"grad_norm": 0.10692565143108368, |
|
"learning_rate": 1.8580180493161434e-05, |
|
"loss": 0.4008, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.5676855895196506, |
|
"grad_norm": 0.07992343604564667, |
|
"learning_rate": 1.8573315289813866e-05, |
|
"loss": 0.3971, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.5689332501559575, |
|
"grad_norm": 0.08050104975700378, |
|
"learning_rate": 1.856643480360675e-05, |
|
"loss": 0.3488, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.5701809107922645, |
|
"grad_norm": 0.07934119552373886, |
|
"learning_rate": 1.855953904680529e-05, |
|
"loss": 0.2948, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.5714285714285714, |
|
"grad_norm": 0.09823425859212875, |
|
"learning_rate": 1.855262803170191e-05, |
|
"loss": 0.3186, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.5726762320648784, |
|
"grad_norm": 0.23104049265384674, |
|
"learning_rate": 1.8545701770616254e-05, |
|
"loss": 0.3841, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.5739238927011853, |
|
"grad_norm": 0.07883207499980927, |
|
"learning_rate": 1.8538760275895118e-05, |
|
"loss": 0.4124, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5751715533374921, |
|
"grad_norm": 0.07763822376728058, |
|
"learning_rate": 1.853180355991247e-05, |
|
"loss": 0.2878, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.5764192139737991, |
|
"grad_norm": 0.09326838701963425, |
|
"learning_rate": 1.852483163506941e-05, |
|
"loss": 0.322, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.577666874610106, |
|
"grad_norm": 0.08444075286388397, |
|
"learning_rate": 1.8517844513794144e-05, |
|
"loss": 0.2674, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.578914535246413, |
|
"grad_norm": 0.07540516555309296, |
|
"learning_rate": 1.8510842208541962e-05, |
|
"loss": 0.284, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5801621958827199, |
|
"grad_norm": 0.08852772414684296, |
|
"learning_rate": 1.850382473179524e-05, |
|
"loss": 0.4359, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5814098565190269, |
|
"grad_norm": 0.07915528863668442, |
|
"learning_rate": 1.849679209606338e-05, |
|
"loss": 0.3875, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.5826575171553338, |
|
"grad_norm": 0.08570799231529236, |
|
"learning_rate": 1.8489744313882818e-05, |
|
"loss": 0.3781, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5839051777916406, |
|
"grad_norm": 0.0977482944726944, |
|
"learning_rate": 1.8482681397816986e-05, |
|
"loss": 0.3402, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.5851528384279476, |
|
"grad_norm": 0.07194789499044418, |
|
"learning_rate": 1.847560336045629e-05, |
|
"loss": 0.3404, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5864004990642545, |
|
"grad_norm": 0.08206473290920258, |
|
"learning_rate": 1.84685102144181e-05, |
|
"loss": 0.2855, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5876481597005615, |
|
"grad_norm": 0.1329026073217392, |
|
"learning_rate": 1.8461401972346713e-05, |
|
"loss": 0.4132, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5888958203368684, |
|
"grad_norm": 0.11516719311475754, |
|
"learning_rate": 1.8454278646913336e-05, |
|
"loss": 0.3963, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.5901434809731753, |
|
"grad_norm": 0.08752531558275223, |
|
"learning_rate": 1.8447140250816065e-05, |
|
"loss": 0.3965, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.5913911416094823, |
|
"grad_norm": 0.06656806170940399, |
|
"learning_rate": 1.8439986796779866e-05, |
|
"loss": 0.2943, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.5926388022457891, |
|
"grad_norm": 0.07554040104150772, |
|
"learning_rate": 1.8432818297556535e-05, |
|
"loss": 0.3709, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5938864628820961, |
|
"grad_norm": 0.07638383656740189, |
|
"learning_rate": 1.84256347659247e-05, |
|
"loss": 0.3587, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.595134123518403, |
|
"grad_norm": 0.09883903712034225, |
|
"learning_rate": 1.841843621468978e-05, |
|
"loss": 0.2965, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.5963817841547099, |
|
"grad_norm": 0.09742535650730133, |
|
"learning_rate": 1.8411222656683967e-05, |
|
"loss": 0.3927, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5976294447910169, |
|
"grad_norm": 0.08859851956367493, |
|
"learning_rate": 1.8403994104766214e-05, |
|
"loss": 0.3119, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.5988771054273238, |
|
"grad_norm": 0.08222465217113495, |
|
"learning_rate": 1.8396750571822188e-05, |
|
"loss": 0.3199, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.6001247660636307, |
|
"grad_norm": 0.08288297802209854, |
|
"learning_rate": 1.838949207076427e-05, |
|
"loss": 0.3581, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.6013724266999376, |
|
"grad_norm": 0.09967238456010818, |
|
"learning_rate": 1.8382218614531518e-05, |
|
"loss": 0.4083, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.6026200873362445, |
|
"grad_norm": 0.08456137031316757, |
|
"learning_rate": 1.8374930216089663e-05, |
|
"loss": 0.3454, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.6038677479725515, |
|
"grad_norm": 0.061698053032159805, |
|
"learning_rate": 1.836762688843105e-05, |
|
"loss": 0.2286, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.6051154086088584, |
|
"grad_norm": 0.09728894382715225, |
|
"learning_rate": 1.8360308644574654e-05, |
|
"loss": 0.362, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.6063630692451654, |
|
"grad_norm": 0.07975105941295624, |
|
"learning_rate": 1.8352975497566032e-05, |
|
"loss": 0.3507, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.6076107298814722, |
|
"grad_norm": 0.41635558009147644, |
|
"learning_rate": 1.8345627460477312e-05, |
|
"loss": 0.3044, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.6088583905177791, |
|
"grad_norm": 0.07460454106330872, |
|
"learning_rate": 1.833826454640716e-05, |
|
"loss": 0.2928, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.6101060511540861, |
|
"grad_norm": 0.08211305737495422, |
|
"learning_rate": 1.833088676848077e-05, |
|
"loss": 0.3005, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.611353711790393, |
|
"grad_norm": 0.22251826524734497, |
|
"learning_rate": 1.8323494139849824e-05, |
|
"loss": 0.361, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.6126013724267, |
|
"grad_norm": 0.07368269562721252, |
|
"learning_rate": 1.8316086673692477e-05, |
|
"loss": 0.2952, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.6138490330630069, |
|
"grad_norm": 0.08986833691596985, |
|
"learning_rate": 1.8308664383213343e-05, |
|
"loss": 0.3222, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.6150966936993137, |
|
"grad_norm": 0.07270447164773941, |
|
"learning_rate": 1.8301227281643453e-05, |
|
"loss": 0.3357, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.6163443543356207, |
|
"grad_norm": 0.1448320597410202, |
|
"learning_rate": 1.829377538224024e-05, |
|
"loss": 0.4107, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.6175920149719276, |
|
"grad_norm": 0.08396106213331223, |
|
"learning_rate": 1.8286308698287524e-05, |
|
"loss": 0.3513, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6188396756082346, |
|
"grad_norm": 0.08713540434837341, |
|
"learning_rate": 1.827882724309547e-05, |
|
"loss": 0.3977, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.6188396756082346, |
|
"eval_loss": 0.34908154606819153, |
|
"eval_runtime": 2820.4987, |
|
"eval_samples_per_second": 0.253, |
|
"eval_steps_per_second": 0.127, |
|
"step": 496 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2403, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 248, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.2079825954766143e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|