|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.975609756097561, |
|
"eval_steps": 100, |
|
"global_step": 1400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.222031839173859, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 1.0419, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 1.111273169517517, |
|
"eval_runtime": 187.0422, |
|
"eval_samples_per_second": 216.272, |
|
"eval_steps_per_second": 27.037, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.978909941838748, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.1665, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 6.130580056448725, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.0667, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 6.089935092381693, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.1201, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.442869698529515, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.1546, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.28471945792447, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.0699, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 5.161065961031279, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.0906, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.4182824757873225, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.0971, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.306106150153299, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 1.0531, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 3.191198685577769, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.9869, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.533581010023776, |
|
"learning_rate": 2.2e-06, |
|
"loss": 0.9584, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.0578264756667672, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.0656, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.8914865841383306, |
|
"learning_rate": 2.6e-06, |
|
"loss": 0.9628, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.7786338729768105, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 0.9298, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.7245515964220006, |
|
"learning_rate": 3e-06, |
|
"loss": 0.9603, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 2.247401176863816, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 0.9844, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.9693650667856115, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.0114, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.5493157164665008, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 0.9769, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.5312837230851732, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 0.9671, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.438348358445216, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.9968, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 1.6205393019694059, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 0.9493, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.8998328673915148, |
|
"learning_rate": 4.4e-06, |
|
"loss": 0.9248, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4772584854244761, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 0.9606, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.6492344751154981, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.9367, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4492135752311766, |
|
"learning_rate": 5e-06, |
|
"loss": 0.9681, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.9345029453812452, |
|
"learning_rate": 5.2e-06, |
|
"loss": 0.9767, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4303746347311266, |
|
"learning_rate": 5.400000000000001e-06, |
|
"loss": 1.0061, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.315283188205805, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 0.8702, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.3343374527628737, |
|
"learning_rate": 5.8e-06, |
|
"loss": 0.9541, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.4954136888328653, |
|
"learning_rate": 6e-06, |
|
"loss": 0.9801, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.5130617654914862, |
|
"learning_rate": 6.200000000000001e-06, |
|
"loss": 0.9215, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.499775954243391, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 0.9385, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.7786154279558195, |
|
"learning_rate": 6.600000000000001e-06, |
|
"loss": 0.929, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.7042536424473338, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.9697, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.2627033504811447, |
|
"learning_rate": 7e-06, |
|
"loss": 0.9032, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2805435720774605, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 0.8599, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.4840232476266473, |
|
"learning_rate": 7.4e-06, |
|
"loss": 0.9369, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.4458756643238664, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 0.8746, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.3994007419423393, |
|
"learning_rate": 7.800000000000002e-06, |
|
"loss": 0.8775, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.089121632396219, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.8899, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.592387928741599, |
|
"learning_rate": 8.2e-06, |
|
"loss": 0.8429, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.4183237137083025, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.9994, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.253020204127273, |
|
"learning_rate": 8.6e-06, |
|
"loss": 0.9008, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2426930310189384, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.8955, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.9803308262802113, |
|
"learning_rate": 9e-06, |
|
"loss": 0.953, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2733579391179952, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.9422, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.1807015822635625, |
|
"learning_rate": 9.4e-06, |
|
"loss": 0.9207, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2510051340626769, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.8431, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.2611410009201918, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 0.8627, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 2.588503959328431, |
|
"learning_rate": 1e-05, |
|
"loss": 0.937, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.472726233992422, |
|
"learning_rate": 1.02e-05, |
|
"loss": 0.915, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.2477329691005965, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.9082, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.9798238525349643, |
|
"learning_rate": 1.0600000000000002e-05, |
|
"loss": 0.909, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.4023041981907929, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 0.8508, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.38789783036251, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 0.962, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.6046775651575935, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.8629, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.211890766417864, |
|
"learning_rate": 1.14e-05, |
|
"loss": 0.8936, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.5305064382435207, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.9087, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.3858807358761336, |
|
"learning_rate": 1.18e-05, |
|
"loss": 0.8888, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.4160996675215114, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.9709, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.612657711675939, |
|
"learning_rate": 1.22e-05, |
|
"loss": 0.9553, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.563228725731034, |
|
"learning_rate": 1.2400000000000002e-05, |
|
"loss": 0.9759, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.549307417934899, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 0.9167, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.6084211112800015, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 0.8917, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3125970489166219, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 0.83, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.5194204769356594, |
|
"learning_rate": 1.3200000000000002e-05, |
|
"loss": 0.8531, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3199910396453893, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 0.9043, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3154362295691828, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 0.839, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.4154491863536813, |
|
"learning_rate": 1.38e-05, |
|
"loss": 0.8477, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.434434391565484, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.8615, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.4770964651688123, |
|
"learning_rate": 1.4200000000000001e-05, |
|
"loss": 0.8751, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.2822341452960506, |
|
"learning_rate": 1.4400000000000001e-05, |
|
"loss": 0.8426, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.7793171841690287, |
|
"learning_rate": 1.46e-05, |
|
"loss": 0.8814, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3354120614602836, |
|
"learning_rate": 1.48e-05, |
|
"loss": 0.9254, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.3501540008431716, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.8709, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.266376412273164, |
|
"learning_rate": 1.5200000000000002e-05, |
|
"loss": 0.8844, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.281004813368152, |
|
"learning_rate": 1.54e-05, |
|
"loss": 0.9125, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 1.351899638153987, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 0.947, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.230662844786248, |
|
"learning_rate": 1.58e-05, |
|
"loss": 0.8643, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.248647661067115, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.8728, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.4342385839949727, |
|
"learning_rate": 1.62e-05, |
|
"loss": 0.896, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.2641773466744164, |
|
"learning_rate": 1.64e-05, |
|
"loss": 0.8514, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.5090046658368796, |
|
"learning_rate": 1.66e-05, |
|
"loss": 0.8948, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.077918114752302, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.9363, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.4114006422721, |
|
"learning_rate": 1.7e-05, |
|
"loss": 0.8664, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.4095249133167944, |
|
"learning_rate": 1.72e-05, |
|
"loss": 0.8569, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.3912369318560927, |
|
"learning_rate": 1.7400000000000003e-05, |
|
"loss": 0.9265, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.591904092465942, |
|
"learning_rate": 1.76e-05, |
|
"loss": 0.8981, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.3641569225962626, |
|
"learning_rate": 1.7800000000000002e-05, |
|
"loss": 0.9682, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.7178690467888635, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.9528, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.2120969466340743, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 0.8879, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.4194691633551582, |
|
"learning_rate": 1.8400000000000003e-05, |
|
"loss": 0.916, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.2552224627285886, |
|
"learning_rate": 1.86e-05, |
|
"loss": 0.8385, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.5744253728758688, |
|
"learning_rate": 1.88e-05, |
|
"loss": 0.9253, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.28754739044814, |
|
"learning_rate": 1.9e-05, |
|
"loss": 0.9304, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.3937275944005247, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 0.7894, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.7203298354877872, |
|
"learning_rate": 1.94e-05, |
|
"loss": 0.8396, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.3582005168065812, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 0.8391, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.7784040830385452, |
|
"learning_rate": 1.98e-05, |
|
"loss": 0.9852, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.8125475091606469, |
|
"learning_rate": 2e-05, |
|
"loss": 0.9179, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"eval_loss": 0.8885624408721924, |
|
"eval_runtime": 187.1128, |
|
"eval_samples_per_second": 216.19, |
|
"eval_steps_per_second": 27.026, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.583544229383965, |
|
"learning_rate": 1.9999993568531234e-05, |
|
"loss": 0.9191, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.312290801935943, |
|
"learning_rate": 1.99999742741332e-05, |
|
"loss": 0.866, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.5049576941839968, |
|
"learning_rate": 1.999994211683072e-05, |
|
"loss": 0.8443, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.449632940843496, |
|
"learning_rate": 1.9999897096665158e-05, |
|
"loss": 0.9668, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.393761122315979, |
|
"learning_rate": 1.999983921369442e-05, |
|
"loss": 0.9582, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.4243225596755649, |
|
"learning_rate": 1.9999768467992964e-05, |
|
"loss": 0.8816, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.6681837751811386, |
|
"learning_rate": 1.999968485965179e-05, |
|
"loss": 1.0217, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.6052796096952295, |
|
"learning_rate": 1.999958838877844e-05, |
|
"loss": 0.9085, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.3634731163174791, |
|
"learning_rate": 1.999947905549701e-05, |
|
"loss": 0.8114, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.563483750334544, |
|
"learning_rate": 1.9999356859948126e-05, |
|
"loss": 0.844, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.309832232231074, |
|
"learning_rate": 1.999922180228897e-05, |
|
"loss": 0.8668, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.5223471077188282, |
|
"learning_rate": 1.999907388269327e-05, |
|
"loss": 0.8465, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.3448768039546044, |
|
"learning_rate": 1.999891310135129e-05, |
|
"loss": 0.96, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.4862383791563054, |
|
"learning_rate": 1.9998739458469846e-05, |
|
"loss": 0.9533, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.2017931806414803, |
|
"learning_rate": 1.999855295427229e-05, |
|
"loss": 0.8375, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.2583376428869173, |
|
"learning_rate": 1.999835358899852e-05, |
|
"loss": 0.805, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.2260058170108115, |
|
"learning_rate": 1.9998141362904978e-05, |
|
"loss": 0.8608, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.437771413281763, |
|
"learning_rate": 1.999791627626466e-05, |
|
"loss": 0.886, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.2184693792099177, |
|
"learning_rate": 1.999767832936708e-05, |
|
"loss": 0.8407, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.306478784713779, |
|
"learning_rate": 1.9997427522518315e-05, |
|
"loss": 0.8752, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.1534611511399118, |
|
"learning_rate": 1.9997163856040974e-05, |
|
"loss": 0.932, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3792596997556017, |
|
"learning_rate": 1.999688733027421e-05, |
|
"loss": 0.923, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.8415988707474704, |
|
"learning_rate": 1.9996597945573714e-05, |
|
"loss": 0.8956, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3351124075493161, |
|
"learning_rate": 1.9996295702311724e-05, |
|
"loss": 0.9524, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3103430169447072, |
|
"learning_rate": 1.999598060087701e-05, |
|
"loss": 0.9206, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3641483437726287, |
|
"learning_rate": 1.999565264167489e-05, |
|
"loss": 0.8577, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3543902897202786, |
|
"learning_rate": 1.9995311825127206e-05, |
|
"loss": 0.8494, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1381528151420175, |
|
"learning_rate": 1.9994958151672358e-05, |
|
"loss": 0.8701, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.4344875566309996, |
|
"learning_rate": 1.9994591621765272e-05, |
|
"loss": 0.8702, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1956730998726899, |
|
"learning_rate": 1.9994212235877407e-05, |
|
"loss": 0.9015, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.2029518457227866, |
|
"learning_rate": 1.9993819994496776e-05, |
|
"loss": 0.9442, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1604274764251628, |
|
"learning_rate": 1.9993414898127905e-05, |
|
"loss": 0.8042, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3038943540300167, |
|
"learning_rate": 1.9992996947291876e-05, |
|
"loss": 0.9018, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1960860187114335, |
|
"learning_rate": 1.999256614252629e-05, |
|
"loss": 0.9428, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3913155509430453, |
|
"learning_rate": 1.999212248438529e-05, |
|
"loss": 0.9788, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.3052468844211198, |
|
"learning_rate": 1.999166597343955e-05, |
|
"loss": 0.9086, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.7145477455732823, |
|
"learning_rate": 1.9991196610276283e-05, |
|
"loss": 0.9369, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.3431672968207697, |
|
"learning_rate": 1.9990714395499222e-05, |
|
"loss": 0.9278, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.1560484700856692, |
|
"learning_rate": 1.999021932972864e-05, |
|
"loss": 0.871, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.137982958645585, |
|
"learning_rate": 1.9989711413601332e-05, |
|
"loss": 0.7911, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.41243915234061, |
|
"learning_rate": 1.9989190647770633e-05, |
|
"loss": 1.0343, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.6313054425561822, |
|
"learning_rate": 1.99886570329064e-05, |
|
"loss": 0.9113, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1822708557044925, |
|
"learning_rate": 1.9988110569695017e-05, |
|
"loss": 0.8441, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1087613022251992, |
|
"learning_rate": 1.9987551258839392e-05, |
|
"loss": 0.8759, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1203723621386166, |
|
"learning_rate": 1.9986979101058972e-05, |
|
"loss": 0.8872, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.5757934934202387, |
|
"learning_rate": 1.9986394097089714e-05, |
|
"loss": 0.8804, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1456220385730522, |
|
"learning_rate": 1.998579624768411e-05, |
|
"loss": 0.7979, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.097480086108459, |
|
"learning_rate": 1.998518555361116e-05, |
|
"loss": 0.8817, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.2141863942408255, |
|
"learning_rate": 1.9984562015656406e-05, |
|
"loss": 0.8688, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.165899291397342, |
|
"learning_rate": 1.9983925634621894e-05, |
|
"loss": 0.8389, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1224242061916174, |
|
"learning_rate": 1.9983276411326206e-05, |
|
"loss": 0.8456, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1718864519831262, |
|
"learning_rate": 1.9982614346604424e-05, |
|
"loss": 0.9252, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1633450486139567, |
|
"learning_rate": 1.9981939441308166e-05, |
|
"loss": 0.9022, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8675156572488367, |
|
"learning_rate": 1.998125169630555e-05, |
|
"loss": 0.9143, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.3275288997192118, |
|
"learning_rate": 1.9980551112481224e-05, |
|
"loss": 0.975, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1366645435061968, |
|
"learning_rate": 1.997983769073634e-05, |
|
"loss": 0.9066, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2539240061165282, |
|
"learning_rate": 1.9979111431988575e-05, |
|
"loss": 0.8686, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1830638013438253, |
|
"learning_rate": 1.997837233717211e-05, |
|
"loss": 0.9105, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2289851798766707, |
|
"learning_rate": 1.9977620407237627e-05, |
|
"loss": 0.97, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1894635911968074, |
|
"learning_rate": 1.997685564315234e-05, |
|
"loss": 0.941, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.439765259122199, |
|
"learning_rate": 1.997607804589996e-05, |
|
"loss": 1.0221, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1495993551137769, |
|
"learning_rate": 1.9975287616480702e-05, |
|
"loss": 0.8823, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2383675419211015, |
|
"learning_rate": 1.9974484355911287e-05, |
|
"loss": 0.8684, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.219442395631165, |
|
"learning_rate": 1.997366826522495e-05, |
|
"loss": 0.9084, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2050137099782028, |
|
"learning_rate": 1.9972839345471423e-05, |
|
"loss": 0.8461, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1610243840319072, |
|
"learning_rate": 1.997199759771694e-05, |
|
"loss": 0.843, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1154404343775486, |
|
"learning_rate": 1.997114302304423e-05, |
|
"loss": 0.8247, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.194607031155807, |
|
"learning_rate": 1.9970275622552536e-05, |
|
"loss": 0.8432, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.4022970990428032, |
|
"learning_rate": 1.9969395397357584e-05, |
|
"loss": 0.9016, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1858220432102802, |
|
"learning_rate": 1.99685023485916e-05, |
|
"loss": 0.9437, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1155321667076121, |
|
"learning_rate": 1.9967596477403318e-05, |
|
"loss": 0.8602, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2905373990572484, |
|
"learning_rate": 1.996667778495794e-05, |
|
"loss": 0.9333, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1931787552579112, |
|
"learning_rate": 1.9965746272437187e-05, |
|
"loss": 0.7826, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2630219016898117, |
|
"learning_rate": 1.996480194103925e-05, |
|
"loss": 0.8185, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.204987733919362, |
|
"learning_rate": 1.9963844791978818e-05, |
|
"loss": 0.8742, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2437041912683955, |
|
"learning_rate": 1.996287482648707e-05, |
|
"loss": 0.9834, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.254870155842108, |
|
"learning_rate": 1.9961892045811663e-05, |
|
"loss": 0.8836, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2090034933034504, |
|
"learning_rate": 1.9960896451216738e-05, |
|
"loss": 0.8667, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1598314715618245, |
|
"learning_rate": 1.9959888043982927e-05, |
|
"loss": 0.8849, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.252477055176123, |
|
"learning_rate": 1.995886682540734e-05, |
|
"loss": 0.8851, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1256837247964426, |
|
"learning_rate": 1.995783279680356e-05, |
|
"loss": 0.8993, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2572839567534193, |
|
"learning_rate": 1.995678595950165e-05, |
|
"loss": 0.8838, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2788480841418937, |
|
"learning_rate": 1.9955726314848154e-05, |
|
"loss": 0.9657, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4351399334715638, |
|
"learning_rate": 1.9954653864206088e-05, |
|
"loss": 0.8965, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.148517952301927, |
|
"learning_rate": 1.9953568608954934e-05, |
|
"loss": 0.9614, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.0899591202732373, |
|
"learning_rate": 1.995247055049065e-05, |
|
"loss": 1.0451, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1956254586029718, |
|
"learning_rate": 1.9951359690225662e-05, |
|
"loss": 0.8135, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1207108959665357, |
|
"learning_rate": 1.9950236029588863e-05, |
|
"loss": 0.8822, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1899112755126982, |
|
"learning_rate": 1.9949099570025613e-05, |
|
"loss": 0.869, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2114377382527095, |
|
"learning_rate": 1.994795031299773e-05, |
|
"loss": 0.8621, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1290548904860946, |
|
"learning_rate": 1.9946788259983493e-05, |
|
"loss": 0.9085, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.101382636702055, |
|
"learning_rate": 1.9945613412477652e-05, |
|
"loss": 0.8717, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2652298684318162, |
|
"learning_rate": 1.99444257719914e-05, |
|
"loss": 0.914, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.1661768680774713, |
|
"learning_rate": 1.9943225340052392e-05, |
|
"loss": 1.0426, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3501980135490719, |
|
"learning_rate": 1.9942012118204738e-05, |
|
"loss": 0.8917, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2049018991454707, |
|
"learning_rate": 1.9940786108009e-05, |
|
"loss": 0.8631, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.215922883123164, |
|
"learning_rate": 1.9939547311042177e-05, |
|
"loss": 0.9292, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.1267422886076772, |
|
"learning_rate": 1.9938295728897735e-05, |
|
"loss": 0.8138, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.1497998538174055, |
|
"learning_rate": 1.9937031363185573e-05, |
|
"loss": 0.8412, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3726576421968895, |
|
"learning_rate": 1.993575421553204e-05, |
|
"loss": 1.0123, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 0.8822317719459534, |
|
"eval_runtime": 187.1914, |
|
"eval_samples_per_second": 216.1, |
|
"eval_steps_per_second": 27.015, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2637805473747161, |
|
"learning_rate": 1.9934464287579922e-05, |
|
"loss": 0.9358, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2221016247650263, |
|
"learning_rate": 1.9933161580988444e-05, |
|
"loss": 0.8732, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2362316473986823, |
|
"learning_rate": 1.9931846097433265e-05, |
|
"loss": 0.8965, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3892342291259105, |
|
"learning_rate": 1.9930517838606492e-05, |
|
"loss": 0.8785, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2568068018376466, |
|
"learning_rate": 1.992917680621665e-05, |
|
"loss": 0.8879, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.2887830429527976, |
|
"learning_rate": 1.9927823001988703e-05, |
|
"loss": 0.8982, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.0952878042078324, |
|
"learning_rate": 1.9926456427664038e-05, |
|
"loss": 0.8321, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.1488105022621649, |
|
"learning_rate": 1.9925077085000475e-05, |
|
"loss": 0.8938, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.2001692579551484, |
|
"learning_rate": 1.992368497577225e-05, |
|
"loss": 0.9604, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5520591983316103, |
|
"learning_rate": 1.992228010177003e-05, |
|
"loss": 0.8965, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.1538845325628375, |
|
"learning_rate": 1.992086246480089e-05, |
|
"loss": 0.9079, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3086118735118826, |
|
"learning_rate": 1.991943206668833e-05, |
|
"loss": 0.8822, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3066154322169896, |
|
"learning_rate": 1.9917988909272263e-05, |
|
"loss": 0.9422, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3788975402648815, |
|
"learning_rate": 1.9916532994409012e-05, |
|
"loss": 0.8661, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.157353475407855, |
|
"learning_rate": 1.991506432397131e-05, |
|
"loss": 0.8708, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0668946879406427, |
|
"learning_rate": 1.99135828998483e-05, |
|
"loss": 0.8281, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0586925383296883, |
|
"learning_rate": 1.9912088723945527e-05, |
|
"loss": 0.8748, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.2243244443889567, |
|
"learning_rate": 1.9910581798184947e-05, |
|
"loss": 0.8876, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.153241330865434, |
|
"learning_rate": 1.99090621245049e-05, |
|
"loss": 1.0029, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3289456136734883, |
|
"learning_rate": 1.990752970486014e-05, |
|
"loss": 0.9331, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.1658780585501232, |
|
"learning_rate": 1.99059845412218e-05, |
|
"loss": 0.8539, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0841474719145574, |
|
"learning_rate": 1.9904426635577426e-05, |
|
"loss": 0.7989, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2100420613683809, |
|
"learning_rate": 1.990285598993093e-05, |
|
"loss": 0.9099, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2327481765852046, |
|
"learning_rate": 1.9901272606302635e-05, |
|
"loss": 0.8782, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.1374571970583345, |
|
"learning_rate": 1.9899676486729227e-05, |
|
"loss": 0.9215, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2454011716677853, |
|
"learning_rate": 1.9898067633263795e-05, |
|
"loss": 0.9805, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.051858806753117, |
|
"learning_rate": 1.989644604797579e-05, |
|
"loss": 0.8915, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.6709690832913764, |
|
"learning_rate": 1.989481173295105e-05, |
|
"loss": 0.9316, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2921955310126267, |
|
"learning_rate": 1.9893164690291783e-05, |
|
"loss": 0.911, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.4306151791171935, |
|
"learning_rate": 1.9891504922116572e-05, |
|
"loss": 0.8217, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.4490313892439401, |
|
"learning_rate": 1.988983243056036e-05, |
|
"loss": 0.9171, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2261731033069914, |
|
"learning_rate": 1.988814721777447e-05, |
|
"loss": 0.857, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.1936230768093887, |
|
"learning_rate": 1.988644928592658e-05, |
|
"loss": 0.9604, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2805860141107388, |
|
"learning_rate": 1.988473863720072e-05, |
|
"loss": 0.8911, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2391969410884403, |
|
"learning_rate": 1.9883015273797302e-05, |
|
"loss": 0.8648, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.1576939443779746, |
|
"learning_rate": 1.988127919793306e-05, |
|
"loss": 0.8933, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.3904763872990524, |
|
"learning_rate": 1.9879530411841114e-05, |
|
"loss": 0.8738, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.6162138215699062, |
|
"learning_rate": 1.987776891777091e-05, |
|
"loss": 0.8614, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.3562904123393382, |
|
"learning_rate": 1.9875994717988238e-05, |
|
"loss": 0.8795, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5730263292883433, |
|
"learning_rate": 1.9874207814775252e-05, |
|
"loss": 0.9134, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.7831810147386489, |
|
"learning_rate": 1.9872408210430433e-05, |
|
"loss": 0.8516, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1832426844117234, |
|
"learning_rate": 1.9870595907268596e-05, |
|
"loss": 0.8199, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.350941258232983, |
|
"learning_rate": 1.98687709076209e-05, |
|
"loss": 0.9702, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2867689498331254, |
|
"learning_rate": 1.9866933213834824e-05, |
|
"loss": 0.8643, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.4677460519503693, |
|
"learning_rate": 1.986508282827419e-05, |
|
"loss": 0.8778, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.4363310224409451, |
|
"learning_rate": 1.9863219753319135e-05, |
|
"loss": 0.8522, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1699677293775574, |
|
"learning_rate": 1.9861343991366115e-05, |
|
"loss": 0.802, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2924807238317337, |
|
"learning_rate": 1.9859455544827918e-05, |
|
"loss": 0.9509, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2521477373576704, |
|
"learning_rate": 1.9857554416133638e-05, |
|
"loss": 0.8525, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1517606654171464, |
|
"learning_rate": 1.9855640607728684e-05, |
|
"loss": 0.8671, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1846979830932318, |
|
"learning_rate": 1.985371412207478e-05, |
|
"loss": 0.8872, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0494510402617383, |
|
"learning_rate": 1.9851774961649947e-05, |
|
"loss": 0.8404, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2000573806870802, |
|
"learning_rate": 1.984982312894852e-05, |
|
"loss": 0.858, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2135277830431934, |
|
"learning_rate": 1.984785862648112e-05, |
|
"loss": 0.8937, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.1101162005595753, |
|
"learning_rate": 1.984588145677469e-05, |
|
"loss": 0.8475, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2142650007241018, |
|
"learning_rate": 1.9843891622372434e-05, |
|
"loss": 0.9388, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1822666855469028, |
|
"learning_rate": 1.9841889125833877e-05, |
|
"loss": 0.8937, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.5588956230747333, |
|
"learning_rate": 1.9839873969734813e-05, |
|
"loss": 0.8999, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2813736208840643, |
|
"learning_rate": 1.983784615666732e-05, |
|
"loss": 1.0269, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0937396851014907, |
|
"learning_rate": 1.983580568923977e-05, |
|
"loss": 0.8552, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0789428924402067, |
|
"learning_rate": 1.9833752570076794e-05, |
|
"loss": 0.8626, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0716959865947704, |
|
"learning_rate": 1.983168680181931e-05, |
|
"loss": 0.9014, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5586891426715053, |
|
"learning_rate": 1.9829608387124512e-05, |
|
"loss": 0.8257, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.159299773943287, |
|
"learning_rate": 1.9827517328665835e-05, |
|
"loss": 0.9158, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.614928320766928, |
|
"learning_rate": 1.9825413629133007e-05, |
|
"loss": 0.905, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2759728656235267, |
|
"learning_rate": 1.9823297291232003e-05, |
|
"loss": 0.8884, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.130197334786531, |
|
"learning_rate": 1.9821168317685048e-05, |
|
"loss": 0.9401, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1071953972918522, |
|
"learning_rate": 1.981902671123063e-05, |
|
"loss": 0.8998, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.4143214889252662, |
|
"learning_rate": 1.981687247462349e-05, |
|
"loss": 1.0354, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1936018368917338, |
|
"learning_rate": 1.9814705610634602e-05, |
|
"loss": 0.8934, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.9588028448015303, |
|
"learning_rate": 1.981252612205119e-05, |
|
"loss": 0.7949, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0367090915141624, |
|
"learning_rate": 1.981033401167672e-05, |
|
"loss": 0.85, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1632134055825956, |
|
"learning_rate": 1.9808129282330895e-05, |
|
"loss": 0.8961, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1644937246368534, |
|
"learning_rate": 1.980591193684963e-05, |
|
"loss": 0.9055, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1695602344439167, |
|
"learning_rate": 1.9803681978085095e-05, |
|
"loss": 0.9705, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0729503600381038, |
|
"learning_rate": 1.9801439408905663e-05, |
|
"loss": 0.8347, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0999879962808221, |
|
"learning_rate": 1.9799184232195947e-05, |
|
"loss": 0.9158, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0865411615717533, |
|
"learning_rate": 1.979691645085676e-05, |
|
"loss": 0.8844, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.012147198379429, |
|
"learning_rate": 1.9794636067805137e-05, |
|
"loss": 0.9223, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.412178875932858, |
|
"learning_rate": 1.9792343085974316e-05, |
|
"loss": 0.9005, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1197488002533464, |
|
"learning_rate": 1.979003750831375e-05, |
|
"loss": 0.8507, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2057779133550202, |
|
"learning_rate": 1.978771933778909e-05, |
|
"loss": 0.9847, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1213946217031967, |
|
"learning_rate": 1.9785388577382183e-05, |
|
"loss": 0.824, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1036782471126692, |
|
"learning_rate": 1.978304523009107e-05, |
|
"loss": 0.8095, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0945851242489233, |
|
"learning_rate": 1.9780689298929984e-05, |
|
"loss": 0.8134, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.3077751312491577, |
|
"learning_rate": 1.9778320786929347e-05, |
|
"loss": 0.9936, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2408368443934399, |
|
"learning_rate": 1.9775939697135758e-05, |
|
"loss": 0.9111, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0957840590624228, |
|
"learning_rate": 1.9773546032611997e-05, |
|
"loss": 0.8965, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0147868519584988, |
|
"learning_rate": 1.9771139796437028e-05, |
|
"loss": 0.7851, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0973117222342668, |
|
"learning_rate": 1.976872099170597e-05, |
|
"loss": 0.8744, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.3287791694807247, |
|
"learning_rate": 1.9766289621530118e-05, |
|
"loss": 0.8534, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.133246313776563, |
|
"learning_rate": 1.9763845689036927e-05, |
|
"loss": 0.8312, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1419361652462952, |
|
"learning_rate": 1.9761389197370014e-05, |
|
"loss": 0.8366, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0821551951711113, |
|
"learning_rate": 1.9758920149689146e-05, |
|
"loss": 0.8487, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1556389890148893, |
|
"learning_rate": 1.975643854917025e-05, |
|
"loss": 0.8578, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2067794152264784, |
|
"learning_rate": 1.9753944399005388e-05, |
|
"loss": 0.8771, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1417102044118401, |
|
"learning_rate": 1.9751437702402773e-05, |
|
"loss": 0.9566, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.7660938523689576, |
|
"learning_rate": 1.9748918462586752e-05, |
|
"loss": 0.876, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1220879011792553, |
|
"learning_rate": 1.9746386682797802e-05, |
|
"loss": 0.8612, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0816131747151747, |
|
"learning_rate": 1.9743842366292544e-05, |
|
"loss": 0.9106, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"eval_loss": 0.8701280951499939, |
|
"eval_runtime": 187.4205, |
|
"eval_samples_per_second": 215.836, |
|
"eval_steps_per_second": 26.982, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0299750746890193, |
|
"learning_rate": 1.974128551634371e-05, |
|
"loss": 0.8134, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0355435348681352, |
|
"learning_rate": 1.9738716136240166e-05, |
|
"loss": 0.8523, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2446494879448189, |
|
"learning_rate": 1.9736134229286884e-05, |
|
"loss": 0.9613, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2659564391872702, |
|
"learning_rate": 1.9733539798804958e-05, |
|
"loss": 0.8897, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2668653568603956, |
|
"learning_rate": 1.9730932848131585e-05, |
|
"loss": 0.8957, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1810413523703902, |
|
"learning_rate": 1.9728313380620073e-05, |
|
"loss": 0.8298, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1504498545031272, |
|
"learning_rate": 1.9725681399639826e-05, |
|
"loss": 0.9032, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1109923815534024, |
|
"learning_rate": 1.9723036908576344e-05, |
|
"loss": 0.8851, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1153797340529337, |
|
"learning_rate": 1.972037991083122e-05, |
|
"loss": 0.9025, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.0547112750314187, |
|
"learning_rate": 1.971771040982213e-05, |
|
"loss": 0.867, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.9896043321667425, |
|
"learning_rate": 1.971502840898284e-05, |
|
"loss": 0.8251, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.0919583168522662, |
|
"learning_rate": 1.9712333911763186e-05, |
|
"loss": 0.9165, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2904601705665253, |
|
"learning_rate": 1.970962692162909e-05, |
|
"loss": 0.8716, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.0305914092123785, |
|
"learning_rate": 1.9706907442062536e-05, |
|
"loss": 0.8143, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1005551182409805, |
|
"learning_rate": 1.9704175476561568e-05, |
|
"loss": 0.8313, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1201052731592884, |
|
"learning_rate": 1.97014310286403e-05, |
|
"loss": 0.8986, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.046546723321121, |
|
"learning_rate": 1.9698674101828895e-05, |
|
"loss": 0.9152, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.018976681773729, |
|
"learning_rate": 1.969590469967358e-05, |
|
"loss": 0.8272, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.9825109312046809, |
|
"learning_rate": 1.9693122825736607e-05, |
|
"loss": 0.8734, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.027432613849138, |
|
"learning_rate": 1.9690328483596287e-05, |
|
"loss": 0.8852, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1532549677069504, |
|
"learning_rate": 1.9687521676846974e-05, |
|
"loss": 0.8848, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1500913753153383, |
|
"learning_rate": 1.968470240909903e-05, |
|
"loss": 0.9482, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.0861728022209993, |
|
"learning_rate": 1.9681870683978878e-05, |
|
"loss": 0.885, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1508385225727733, |
|
"learning_rate": 1.967902650512894e-05, |
|
"loss": 0.8525, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1091611254738105, |
|
"learning_rate": 1.9676169876207664e-05, |
|
"loss": 0.9763, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.012395214833452, |
|
"learning_rate": 1.9673300800889513e-05, |
|
"loss": 1.0059, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.0709485032246597, |
|
"learning_rate": 1.9670419282864968e-05, |
|
"loss": 0.8361, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.067182605716564, |
|
"learning_rate": 1.96675253258405e-05, |
|
"loss": 0.8886, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.0898056998348664, |
|
"learning_rate": 1.9664618933538593e-05, |
|
"loss": 0.8371, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.0874145605191532, |
|
"learning_rate": 1.9661700109697718e-05, |
|
"loss": 0.9242, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.2537141356402897, |
|
"learning_rate": 1.9658768858072346e-05, |
|
"loss": 0.9072, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.1244052821739037, |
|
"learning_rate": 1.965582518243292e-05, |
|
"loss": 0.9079, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.5132621278229172, |
|
"learning_rate": 1.9652869086565874e-05, |
|
"loss": 0.8279, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1445681558432512, |
|
"learning_rate": 1.9649900574273616e-05, |
|
"loss": 0.8482, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.801049653373737, |
|
"learning_rate": 1.9646919649374524e-05, |
|
"loss": 0.8787, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1852812751336075, |
|
"learning_rate": 1.9643926315702947e-05, |
|
"loss": 0.9122, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1555348167729393, |
|
"learning_rate": 1.9640920577109183e-05, |
|
"loss": 0.8472, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.4430203433228617, |
|
"learning_rate": 1.9637902437459506e-05, |
|
"loss": 0.9194, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.0544540038634749, |
|
"learning_rate": 1.9634871900636124e-05, |
|
"loss": 0.9071, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1435050397743178, |
|
"learning_rate": 1.9631828970537196e-05, |
|
"loss": 0.8863, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.268170090315426, |
|
"learning_rate": 1.9628773651076825e-05, |
|
"loss": 1.0548, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.0486670138134782, |
|
"learning_rate": 1.9625705946185053e-05, |
|
"loss": 0.8845, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1109602735025481, |
|
"learning_rate": 1.9622625859807843e-05, |
|
"loss": 0.8888, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.0327094310140685, |
|
"learning_rate": 1.96195333959071e-05, |
|
"loss": 0.8448, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 12.81851288731012, |
|
"learning_rate": 1.9616428558460634e-05, |
|
"loss": 0.8091, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.3904475342636298, |
|
"learning_rate": 1.961331135146218e-05, |
|
"loss": 0.8578, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.0316055568361129, |
|
"learning_rate": 1.961018177892138e-05, |
|
"loss": 0.8227, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.301685178122554, |
|
"learning_rate": 1.9607039844863788e-05, |
|
"loss": 0.9027, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.1850865906075192, |
|
"learning_rate": 1.960388555333085e-05, |
|
"loss": 0.8745, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.171726269820577, |
|
"learning_rate": 1.960071890837991e-05, |
|
"loss": 0.8424, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2171194717004472, |
|
"learning_rate": 1.959753991408421e-05, |
|
"loss": 0.9305, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2097775475792434, |
|
"learning_rate": 1.9594348574532868e-05, |
|
"loss": 0.7946, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1659533100233976, |
|
"learning_rate": 1.9591144893830884e-05, |
|
"loss": 0.9041, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.361702782618829, |
|
"learning_rate": 1.9587928876099126e-05, |
|
"loss": 0.8343, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2483023246254354, |
|
"learning_rate": 1.9584700525474347e-05, |
|
"loss": 0.8595, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1656310027795045, |
|
"learning_rate": 1.9581459846109153e-05, |
|
"loss": 0.8499, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2008528979815898, |
|
"learning_rate": 1.9578206842172003e-05, |
|
"loss": 0.9706, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1352069526109516, |
|
"learning_rate": 1.9574941517847223e-05, |
|
"loss": 0.8912, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1495509789253435, |
|
"learning_rate": 1.9571663877334975e-05, |
|
"loss": 0.8749, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1074439603505601, |
|
"learning_rate": 1.9568373924851267e-05, |
|
"loss": 0.9433, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2169160927046383, |
|
"learning_rate": 1.9565071664627947e-05, |
|
"loss": 0.9967, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1445655760646407, |
|
"learning_rate": 1.9561757100912692e-05, |
|
"loss": 0.9123, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.034825339597779, |
|
"learning_rate": 1.9558430237968998e-05, |
|
"loss": 0.8477, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1684005466502694, |
|
"learning_rate": 1.95550910800762e-05, |
|
"loss": 0.9228, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.0611745934297807, |
|
"learning_rate": 1.9551739631529427e-05, |
|
"loss": 0.8413, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.164478242612286, |
|
"learning_rate": 1.9548375896639627e-05, |
|
"loss": 0.8819, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 5.069365191756458, |
|
"learning_rate": 1.9544999879733556e-05, |
|
"loss": 0.9306, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.2354599047635832, |
|
"learning_rate": 1.9541611585153758e-05, |
|
"loss": 0.846, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.051996239556798, |
|
"learning_rate": 1.9538211017258573e-05, |
|
"loss": 0.8362, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0239770659963459, |
|
"learning_rate": 1.953479818042214e-05, |
|
"loss": 0.8912, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0915734894988451, |
|
"learning_rate": 1.953137307903436e-05, |
|
"loss": 0.8258, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.4240207885170275, |
|
"learning_rate": 1.952793571750093e-05, |
|
"loss": 0.9152, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1917328080665825, |
|
"learning_rate": 1.9524486100243297e-05, |
|
"loss": 0.8451, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.5484400558582907, |
|
"learning_rate": 1.9521024231698684e-05, |
|
"loss": 0.8355, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1437245836456504, |
|
"learning_rate": 1.9517550116320074e-05, |
|
"loss": 0.963, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1576928591561433, |
|
"learning_rate": 1.9514063758576198e-05, |
|
"loss": 0.8982, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.2350762795576822, |
|
"learning_rate": 1.9510565162951538e-05, |
|
"loss": 0.8619, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.013440484146166, |
|
"learning_rate": 1.9507054333946313e-05, |
|
"loss": 0.8565, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1573947595135938, |
|
"learning_rate": 1.9503531276076483e-05, |
|
"loss": 0.883, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0506730903652184, |
|
"learning_rate": 1.949999599387373e-05, |
|
"loss": 0.8537, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2671573201994177, |
|
"learning_rate": 1.9496448491885473e-05, |
|
"loss": 0.9342, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.1837608740018646, |
|
"learning_rate": 1.9492888774674837e-05, |
|
"loss": 0.8467, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.12550000000519, |
|
"learning_rate": 1.9489316846820668e-05, |
|
"loss": 0.8109, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0726587171258777, |
|
"learning_rate": 1.9485732712917507e-05, |
|
"loss": 0.7991, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2691153809839144, |
|
"learning_rate": 1.9482136377575612e-05, |
|
"loss": 1.0423, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0637626583859996, |
|
"learning_rate": 1.9478527845420915e-05, |
|
"loss": 0.8993, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0884178299273755, |
|
"learning_rate": 1.9474907121095064e-05, |
|
"loss": 0.8942, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0391015249601647, |
|
"learning_rate": 1.947127420925536e-05, |
|
"loss": 0.8135, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.1516781892264725, |
|
"learning_rate": 1.9467629114574805e-05, |
|
"loss": 0.8714, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.1314783254435155, |
|
"learning_rate": 1.9463971841742057e-05, |
|
"loss": 0.9036, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.025454709514132, |
|
"learning_rate": 1.9460302395461437e-05, |
|
"loss": 0.8731, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2270327905246061, |
|
"learning_rate": 1.9456620780452943e-05, |
|
"loss": 0.8698, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0616296964610366, |
|
"learning_rate": 1.9452927001452203e-05, |
|
"loss": 0.7885, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.151475949060371, |
|
"learning_rate": 1.9449221063210512e-05, |
|
"loss": 0.8256, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.423361234847695, |
|
"learning_rate": 1.9445502970494787e-05, |
|
"loss": 0.8914, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1433125351711972, |
|
"learning_rate": 1.944177272808759e-05, |
|
"loss": 0.8687, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.184275686610942, |
|
"learning_rate": 1.943803034078711e-05, |
|
"loss": 0.7931, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.0661356068578263, |
|
"learning_rate": 1.943427581340715e-05, |
|
"loss": 0.7678, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2228382448553605, |
|
"learning_rate": 1.9430509150777144e-05, |
|
"loss": 0.8018, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.88748184451058, |
|
"learning_rate": 1.9426730357742123e-05, |
|
"loss": 0.8992, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"eval_loss": 0.8636618852615356, |
|
"eval_runtime": 187.3213, |
|
"eval_samples_per_second": 215.95, |
|
"eval_steps_per_second": 26.996, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2624347636274722, |
|
"learning_rate": 1.9422939439162724e-05, |
|
"loss": 0.9155, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1698553557184548, |
|
"learning_rate": 1.941913639991518e-05, |
|
"loss": 0.86, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2896541603131255, |
|
"learning_rate": 1.9415321244891324e-05, |
|
"loss": 0.8662, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.0328968599567645, |
|
"learning_rate": 1.9411493978998556e-05, |
|
"loss": 0.8603, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1164191505312102, |
|
"learning_rate": 1.9407654607159872e-05, |
|
"loss": 0.8241, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.0333308388183566, |
|
"learning_rate": 1.940380313431383e-05, |
|
"loss": 0.9102, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2102188125348146, |
|
"learning_rate": 1.9399939565414554e-05, |
|
"loss": 0.8601, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.0252791341841436, |
|
"learning_rate": 1.939606390543173e-05, |
|
"loss": 0.9156, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0237005282287415, |
|
"learning_rate": 1.9392176159350592e-05, |
|
"loss": 0.8721, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.179791811608601, |
|
"learning_rate": 1.938827633217193e-05, |
|
"loss": 0.8506, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2083149029506115, |
|
"learning_rate": 1.938436442891206e-05, |
|
"loss": 0.9396, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.304928466464715, |
|
"learning_rate": 1.938044045460284e-05, |
|
"loss": 0.848, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.245493747578827, |
|
"learning_rate": 1.9376504414291662e-05, |
|
"loss": 1.016, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.435385045917715, |
|
"learning_rate": 1.9372556313041423e-05, |
|
"loss": 1.0092, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0903396597636805, |
|
"learning_rate": 1.9368596155930538e-05, |
|
"loss": 0.7966, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1738401247123667, |
|
"learning_rate": 1.936462394805294e-05, |
|
"loss": 0.8328, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0633958378072337, |
|
"learning_rate": 1.9360639694518047e-05, |
|
"loss": 0.8777, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0396714883459242, |
|
"learning_rate": 1.935664340045079e-05, |
|
"loss": 0.7648, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0106668472141556, |
|
"learning_rate": 1.9352635070991567e-05, |
|
"loss": 0.8712, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1381217667885855, |
|
"learning_rate": 1.934861471129627e-05, |
|
"loss": 0.8539, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1151430876376536, |
|
"learning_rate": 1.9344582326536265e-05, |
|
"loss": 0.8892, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.371300072001641, |
|
"learning_rate": 1.934053792189838e-05, |
|
"loss": 0.848, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.068226316452399, |
|
"learning_rate": 1.9336481502584914e-05, |
|
"loss": 0.858, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0487313154852826, |
|
"learning_rate": 1.9332413073813606e-05, |
|
"loss": 0.8205, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0254282471589573, |
|
"learning_rate": 1.9328332640817657e-05, |
|
"loss": 0.86, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0351254744835727, |
|
"learning_rate": 1.9324240208845694e-05, |
|
"loss": 0.9067, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0150706385574908, |
|
"learning_rate": 1.9320135783161795e-05, |
|
"loss": 0.7885, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0164480002903071, |
|
"learning_rate": 1.9316019369045453e-05, |
|
"loss": 0.8156, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0441180688201646, |
|
"learning_rate": 1.9311890971791586e-05, |
|
"loss": 0.8825, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.044772972990732, |
|
"learning_rate": 1.930775059671053e-05, |
|
"loss": 0.7897, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.9632658012924297, |
|
"learning_rate": 1.9303598249128016e-05, |
|
"loss": 0.7772, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0341143803884538, |
|
"learning_rate": 1.929943393438519e-05, |
|
"loss": 0.9062, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.094263199803778, |
|
"learning_rate": 1.9295257657838578e-05, |
|
"loss": 0.859, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.643732102287827, |
|
"learning_rate": 1.9291069424860103e-05, |
|
"loss": 0.9486, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1173987376473988, |
|
"learning_rate": 1.928686924083706e-05, |
|
"loss": 0.8217, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.029920328225798, |
|
"learning_rate": 1.9282657111172122e-05, |
|
"loss": 0.898, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.095170152800852, |
|
"learning_rate": 1.927843304128332e-05, |
|
"loss": 0.8778, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1961572487379104, |
|
"learning_rate": 1.9274197036604056e-05, |
|
"loss": 0.9524, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1804576808566998, |
|
"learning_rate": 1.9269949102583073e-05, |
|
"loss": 0.8683, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.9733805716493049, |
|
"learning_rate": 1.926568924468446e-05, |
|
"loss": 0.8669, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0706964473279539, |
|
"learning_rate": 1.926141746838765e-05, |
|
"loss": 0.8367, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0033805905176616, |
|
"learning_rate": 1.9257133779187396e-05, |
|
"loss": 0.7884, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1611745396135442, |
|
"learning_rate": 1.9252838182593785e-05, |
|
"loss": 0.8934, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.954029454505071, |
|
"learning_rate": 1.9248530684132217e-05, |
|
"loss": 0.842, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1689253660362962, |
|
"learning_rate": 1.9244211289343397e-05, |
|
"loss": 0.8226, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0553358851077157, |
|
"learning_rate": 1.923988000378334e-05, |
|
"loss": 0.8474, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.2187486629066102, |
|
"learning_rate": 1.9235536833023345e-05, |
|
"loss": 0.8575, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0348731817900605, |
|
"learning_rate": 1.923118178265001e-05, |
|
"loss": 0.8526, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0842630814194327, |
|
"learning_rate": 1.922681485826521e-05, |
|
"loss": 0.9687, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.113876223105261, |
|
"learning_rate": 1.922243606548609e-05, |
|
"loss": 0.891, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1371794908578412, |
|
"learning_rate": 1.9218045409945066e-05, |
|
"loss": 0.8671, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1093753268810582, |
|
"learning_rate": 1.9213642897289807e-05, |
|
"loss": 0.8499, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0311878548667242, |
|
"learning_rate": 1.920922853318324e-05, |
|
"loss": 0.8404, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5676170591497518, |
|
"learning_rate": 1.9204802323303533e-05, |
|
"loss": 0.8505, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.105576515631698, |
|
"learning_rate": 1.9200364273344096e-05, |
|
"loss": 0.8631, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1197845348326956, |
|
"learning_rate": 1.9195914389013557e-05, |
|
"loss": 0.8843, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.387690470463562, |
|
"learning_rate": 1.9191452676035785e-05, |
|
"loss": 0.7525, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1987114359010893, |
|
"learning_rate": 1.9186979140149845e-05, |
|
"loss": 0.8599, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.217781571332019, |
|
"learning_rate": 1.9182493787110027e-05, |
|
"loss": 0.8816, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.048348069880512, |
|
"learning_rate": 1.91779966226858e-05, |
|
"loss": 0.8502, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0833712301083942, |
|
"learning_rate": 1.9173487652661847e-05, |
|
"loss": 0.8391, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2372405354323792, |
|
"learning_rate": 1.9168966882838025e-05, |
|
"loss": 0.8567, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.0414088831972983, |
|
"learning_rate": 1.9164434319029378e-05, |
|
"loss": 0.8912, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.252034938210078, |
|
"learning_rate": 1.9159889967066104e-05, |
|
"loss": 0.8629, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.4159470689143745, |
|
"learning_rate": 1.915533383279358e-05, |
|
"loss": 0.8894, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.089863489159817, |
|
"learning_rate": 1.915076592207234e-05, |
|
"loss": 0.8936, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0763910526561575, |
|
"learning_rate": 1.9146186240778048e-05, |
|
"loss": 0.8483, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9653415746089279, |
|
"learning_rate": 1.9141594794801525e-05, |
|
"loss": 0.8416, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0979290386531497, |
|
"learning_rate": 1.9136991590048713e-05, |
|
"loss": 0.8149, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1134257431959327, |
|
"learning_rate": 1.91323766324407e-05, |
|
"loss": 0.9428, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.2475415888372035, |
|
"learning_rate": 1.912774992791366e-05, |
|
"loss": 0.8359, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1322862514712104, |
|
"learning_rate": 1.9123111482418908e-05, |
|
"loss": 0.8473, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0202073961215956, |
|
"learning_rate": 1.9118461301922837e-05, |
|
"loss": 0.8386, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1772015279152943, |
|
"learning_rate": 1.911379939240695e-05, |
|
"loss": 0.8364, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9781738963360617, |
|
"learning_rate": 1.9109125759867834e-05, |
|
"loss": 0.931, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9690542279329114, |
|
"learning_rate": 1.9104440410317148e-05, |
|
"loss": 0.7959, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.8264203526007248, |
|
"learning_rate": 1.9099743349781633e-05, |
|
"loss": 0.8622, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0483364415879783, |
|
"learning_rate": 1.9095034584303086e-05, |
|
"loss": 0.8971, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1658429627200173, |
|
"learning_rate": 1.9090314119938362e-05, |
|
"loss": 0.8295, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1851938754575262, |
|
"learning_rate": 1.9085581962759366e-05, |
|
"loss": 0.8943, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9887334244222568, |
|
"learning_rate": 1.9080838118853043e-05, |
|
"loss": 0.8942, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.996368417491876, |
|
"learning_rate": 1.9076082594321368e-05, |
|
"loss": 0.8431, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1072723640538134, |
|
"learning_rate": 1.9071315395281344e-05, |
|
"loss": 0.8338, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1114720146010824, |
|
"learning_rate": 1.906653652786499e-05, |
|
"loss": 0.8687, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9795411708341635, |
|
"learning_rate": 1.906174599821933e-05, |
|
"loss": 0.8558, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0358155237590705, |
|
"learning_rate": 1.9056943812506395e-05, |
|
"loss": 0.8539, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0328294723383766, |
|
"learning_rate": 1.9052129976903206e-05, |
|
"loss": 0.9297, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.947373757200239, |
|
"learning_rate": 1.904730449760177e-05, |
|
"loss": 0.7847, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0349224123359808, |
|
"learning_rate": 1.9042467380809068e-05, |
|
"loss": 0.8638, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.958310460108622, |
|
"learning_rate": 1.903761863274706e-05, |
|
"loss": 0.7945, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.4179625401019371, |
|
"learning_rate": 1.903275825965265e-05, |
|
"loss": 0.8623, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9984916683954115, |
|
"learning_rate": 1.902788626777772e-05, |
|
"loss": 0.8547, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9696717947291953, |
|
"learning_rate": 1.902300266338907e-05, |
|
"loss": 0.9136, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0442302283722409, |
|
"learning_rate": 1.901810745276845e-05, |
|
"loss": 0.9077, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0615973187793764, |
|
"learning_rate": 1.9013200642212547e-05, |
|
"loss": 0.9122, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0278429898382326, |
|
"learning_rate": 1.9008282238032955e-05, |
|
"loss": 0.8604, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9677095617976222, |
|
"learning_rate": 1.9003352246556187e-05, |
|
"loss": 0.9101, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9735541688566568, |
|
"learning_rate": 1.8998410674123665e-05, |
|
"loss": 0.8234, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1110533273357168, |
|
"learning_rate": 1.8993457527091696e-05, |
|
"loss": 0.8805, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1119953319593077, |
|
"learning_rate": 1.8988492811831485e-05, |
|
"loss": 0.7915, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"eval_loss": 0.8526943325996399, |
|
"eval_runtime": 187.0435, |
|
"eval_samples_per_second": 216.271, |
|
"eval_steps_per_second": 27.036, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9717769254859505, |
|
"learning_rate": 1.8983516534729117e-05, |
|
"loss": 0.8541, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1567304120011481, |
|
"learning_rate": 1.8978528702185547e-05, |
|
"loss": 0.9735, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.045800467752564, |
|
"learning_rate": 1.8973529320616583e-05, |
|
"loss": 0.7481, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.9988848948131525, |
|
"learning_rate": 1.896851839645291e-05, |
|
"loss": 0.8684, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.052167675344499, |
|
"learning_rate": 1.8963495936140036e-05, |
|
"loss": 0.8679, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1729615157564766, |
|
"learning_rate": 1.8958461946138335e-05, |
|
"loss": 0.8659, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.4141646096076652, |
|
"learning_rate": 1.8953416432922986e-05, |
|
"loss": 0.9133, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0173125987989802, |
|
"learning_rate": 1.8948359402984006e-05, |
|
"loss": 0.8216, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0734401456100644, |
|
"learning_rate": 1.894329086282622e-05, |
|
"loss": 0.8061, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0470846911584983, |
|
"learning_rate": 1.8938210818969257e-05, |
|
"loss": 0.8438, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0472618723061844, |
|
"learning_rate": 1.8933119277947548e-05, |
|
"loss": 0.8508, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0077323188601064, |
|
"learning_rate": 1.892801624631031e-05, |
|
"loss": 0.9165, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0844160173693849, |
|
"learning_rate": 1.8922901730621544e-05, |
|
"loss": 0.8232, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9761080617342631, |
|
"learning_rate": 1.8917775737460015e-05, |
|
"loss": 0.9321, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0110333862570609, |
|
"learning_rate": 1.8912638273419257e-05, |
|
"loss": 0.9282, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1126860660977669, |
|
"learning_rate": 1.890748934510756e-05, |
|
"loss": 0.8755, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.974615001889175, |
|
"learning_rate": 1.8902328959147953e-05, |
|
"loss": 0.833, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.04420546634678, |
|
"learning_rate": 1.8897157122178216e-05, |
|
"loss": 0.9264, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9882139360408511, |
|
"learning_rate": 1.8891973840850844e-05, |
|
"loss": 0.8147, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9698350901525421, |
|
"learning_rate": 1.8886779121833065e-05, |
|
"loss": 0.797, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0892458752220342, |
|
"learning_rate": 1.8881572971806808e-05, |
|
"loss": 0.8928, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.9279387036040393, |
|
"learning_rate": 1.8876355397468714e-05, |
|
"loss": 0.8401, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0113032414744425, |
|
"learning_rate": 1.8871126405530113e-05, |
|
"loss": 0.9115, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9129421884681735, |
|
"learning_rate": 1.8865886002717034e-05, |
|
"loss": 0.8143, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.3098098146866635, |
|
"learning_rate": 1.8860634195770165e-05, |
|
"loss": 1.0118, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9903655628930513, |
|
"learning_rate": 1.8855370991444876e-05, |
|
"loss": 0.8416, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0163609363523551, |
|
"learning_rate": 1.8850096396511196e-05, |
|
"loss": 0.8967, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.058332850451146, |
|
"learning_rate": 1.8844810417753797e-05, |
|
"loss": 0.8814, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0760460518670376, |
|
"learning_rate": 1.8839513061972007e-05, |
|
"loss": 0.8574, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9547071896987184, |
|
"learning_rate": 1.8834204335979777e-05, |
|
"loss": 0.8741, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9830111500512667, |
|
"learning_rate": 1.8828884246605696e-05, |
|
"loss": 0.857, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9666641431423578, |
|
"learning_rate": 1.882355280069295e-05, |
|
"loss": 0.8321, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.4401922028564462, |
|
"learning_rate": 1.8818210005099355e-05, |
|
"loss": 0.8167, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0669102521388445, |
|
"learning_rate": 1.881285586669731e-05, |
|
"loss": 0.7776, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0093813108688519, |
|
"learning_rate": 1.8807490392373808e-05, |
|
"loss": 0.841, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.7570201266726655, |
|
"learning_rate": 1.880211358903043e-05, |
|
"loss": 0.9326, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9953583010172994, |
|
"learning_rate": 1.879672546358332e-05, |
|
"loss": 0.8339, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0508471775378638, |
|
"learning_rate": 1.8791326022963197e-05, |
|
"loss": 0.8643, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.07762751139997, |
|
"learning_rate": 1.8785915274115322e-05, |
|
"loss": 0.8097, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0601387141355354, |
|
"learning_rate": 1.8780493223999508e-05, |
|
"loss": 0.834, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.021216968150058, |
|
"learning_rate": 1.8775059879590106e-05, |
|
"loss": 0.8466, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.092057770038313, |
|
"learning_rate": 1.876961524787599e-05, |
|
"loss": 0.8418, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0761804463517708, |
|
"learning_rate": 1.876415933586056e-05, |
|
"loss": 0.8096, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0382523513195845, |
|
"learning_rate": 1.8758692150561718e-05, |
|
"loss": 0.8371, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0560965482503217, |
|
"learning_rate": 1.8753213699011877e-05, |
|
"loss": 0.9391, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.5019300766603845, |
|
"learning_rate": 1.8747723988257924e-05, |
|
"loss": 0.8927, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.053684928562445, |
|
"learning_rate": 1.8742223025361245e-05, |
|
"loss": 0.9148, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.2566833905549095, |
|
"learning_rate": 1.8736710817397697e-05, |
|
"loss": 0.8452, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9516384274941435, |
|
"learning_rate": 1.8731187371457594e-05, |
|
"loss": 0.8479, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.244319475578046, |
|
"learning_rate": 1.8725652694645714e-05, |
|
"loss": 0.806, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9965107061476687, |
|
"learning_rate": 1.8720106794081276e-05, |
|
"loss": 0.8408, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9923816429015672, |
|
"learning_rate": 1.8714549676897936e-05, |
|
"loss": 0.8249, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0276121053745104, |
|
"learning_rate": 1.870898135024378e-05, |
|
"loss": 0.9268, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9716000925703704, |
|
"learning_rate": 1.8703401821281317e-05, |
|
"loss": 0.8661, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.028051340486763, |
|
"learning_rate": 1.8697811097187445e-05, |
|
"loss": 0.8388, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.2861670965261067, |
|
"learning_rate": 1.8692209185153493e-05, |
|
"loss": 0.8534, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0530293580854682, |
|
"learning_rate": 1.868659609238516e-05, |
|
"loss": 0.8347, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.9979681252925467, |
|
"learning_rate": 1.868097182610253e-05, |
|
"loss": 0.8898, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.05569167326003, |
|
"learning_rate": 1.8675336393540065e-05, |
|
"loss": 0.9221, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0355101234684094, |
|
"learning_rate": 1.8669689801946585e-05, |
|
"loss": 0.8189, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0206561288788434, |
|
"learning_rate": 1.8664032058585265e-05, |
|
"loss": 0.8029, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.1234982856261049, |
|
"learning_rate": 1.8658363170733625e-05, |
|
"loss": 0.8269, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0286971576376855, |
|
"learning_rate": 1.8652683145683523e-05, |
|
"loss": 0.9344, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0455324514804116, |
|
"learning_rate": 1.8646991990741138e-05, |
|
"loss": 0.889, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0032390735498653, |
|
"learning_rate": 1.8641289713226963e-05, |
|
"loss": 0.8517, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.1131893195292746, |
|
"learning_rate": 1.863557632047581e-05, |
|
"loss": 0.8676, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0714530814917393, |
|
"learning_rate": 1.8629851819836774e-05, |
|
"loss": 0.8465, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.944334257350204, |
|
"learning_rate": 1.862411621867324e-05, |
|
"loss": 0.7745, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0785324680314694, |
|
"learning_rate": 1.8618369524362895e-05, |
|
"loss": 0.8505, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0253026699762982, |
|
"learning_rate": 1.861261174429765e-05, |
|
"loss": 0.8498, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.1073123137203067, |
|
"learning_rate": 1.8606842885883724e-05, |
|
"loss": 0.8425, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0138475687542496, |
|
"learning_rate": 1.860106295654155e-05, |
|
"loss": 0.8327, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.983905689468371, |
|
"learning_rate": 1.8595271963705822e-05, |
|
"loss": 0.8286, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9978766241312532, |
|
"learning_rate": 1.8589469914825457e-05, |
|
"loss": 0.8048, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9888483691868927, |
|
"learning_rate": 1.858365681736359e-05, |
|
"loss": 0.8162, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9966944146838984, |
|
"learning_rate": 1.8577832678797575e-05, |
|
"loss": 0.8313, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.069934140934502, |
|
"learning_rate": 1.8571997506618964e-05, |
|
"loss": 0.8822, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9827128570955065, |
|
"learning_rate": 1.8566151308333502e-05, |
|
"loss": 0.8465, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.1466769116887388, |
|
"learning_rate": 1.856029409146112e-05, |
|
"loss": 0.9171, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0198004108553271, |
|
"learning_rate": 1.8554425863535915e-05, |
|
"loss": 0.9252, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0242842691126035, |
|
"learning_rate": 1.854854663210616e-05, |
|
"loss": 0.7665, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0890534784704105, |
|
"learning_rate": 1.8542656404734264e-05, |
|
"loss": 0.8269, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9015971958546473, |
|
"learning_rate": 1.8536755188996797e-05, |
|
"loss": 0.8683, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0291552133995123, |
|
"learning_rate": 1.853084299248445e-05, |
|
"loss": 0.8375, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0379854385313003, |
|
"learning_rate": 1.852491982280205e-05, |
|
"loss": 0.8745, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0475983349262212, |
|
"learning_rate": 1.851898568756853e-05, |
|
"loss": 0.8367, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0198449464309953, |
|
"learning_rate": 1.8513040594416934e-05, |
|
"loss": 0.8291, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0449883854278477, |
|
"learning_rate": 1.850708455099439e-05, |
|
"loss": 0.8851, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0197024852454293, |
|
"learning_rate": 1.850111756496213e-05, |
|
"loss": 0.8032, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9918341986182831, |
|
"learning_rate": 1.849513964399545e-05, |
|
"loss": 0.8112, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0484201108935518, |
|
"learning_rate": 1.8489150795783702e-05, |
|
"loss": 0.8965, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9689254913631651, |
|
"learning_rate": 1.848315102803031e-05, |
|
"loss": 0.8887, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9706717251970238, |
|
"learning_rate": 1.8477140348452745e-05, |
|
"loss": 0.8358, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0842401799268369, |
|
"learning_rate": 1.8471118764782497e-05, |
|
"loss": 0.8892, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.001371823170473, |
|
"learning_rate": 1.8465086284765093e-05, |
|
"loss": 0.8611, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9558206663655661, |
|
"learning_rate": 1.8459042916160077e-05, |
|
"loss": 0.8917, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0018787110080574, |
|
"learning_rate": 1.8452988666740993e-05, |
|
"loss": 0.8895, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9565424323384727, |
|
"learning_rate": 1.844692354429539e-05, |
|
"loss": 0.847, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9944052423681737, |
|
"learning_rate": 1.8440847556624794e-05, |
|
"loss": 0.8445, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0571874002170982, |
|
"learning_rate": 1.8434760711544707e-05, |
|
"loss": 0.9123, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_loss": 0.8448428511619568, |
|
"eval_runtime": 187.3148, |
|
"eval_samples_per_second": 215.957, |
|
"eval_steps_per_second": 26.997, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0692919112422168, |
|
"learning_rate": 1.8428663016884606e-05, |
|
"loss": 0.835, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0624352078303714, |
|
"learning_rate": 1.842255448048791e-05, |
|
"loss": 0.92, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0217814818022022, |
|
"learning_rate": 1.8416435110211998e-05, |
|
"loss": 0.8442, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0079541176167188, |
|
"learning_rate": 1.841030491392817e-05, |
|
"loss": 0.8381, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0605990654308264, |
|
"learning_rate": 1.8404163899521666e-05, |
|
"loss": 0.8171, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0802017142498224, |
|
"learning_rate": 1.8398012074891636e-05, |
|
"loss": 0.9261, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.9980562068741823, |
|
"learning_rate": 1.8391849447951128e-05, |
|
"loss": 0.7851, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0262857177043048, |
|
"learning_rate": 1.838567602662709e-05, |
|
"loss": 0.8495, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0383479353156029, |
|
"learning_rate": 1.837949181886036e-05, |
|
"loss": 0.8686, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9725276530954041, |
|
"learning_rate": 1.8373296832605647e-05, |
|
"loss": 0.8217, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.96459871670791, |
|
"learning_rate": 1.8367091075831517e-05, |
|
"loss": 0.8748, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9652669926432812, |
|
"learning_rate": 1.83608745565204e-05, |
|
"loss": 0.8963, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9725870173994274, |
|
"learning_rate": 1.8354647282668566e-05, |
|
"loss": 0.8629, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0224955249723027, |
|
"learning_rate": 1.8348409262286117e-05, |
|
"loss": 0.8649, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.96997495024294, |
|
"learning_rate": 1.8342160503396975e-05, |
|
"loss": 0.8625, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9528776424597024, |
|
"learning_rate": 1.833590101403889e-05, |
|
"loss": 0.7936, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0092126509995532, |
|
"learning_rate": 1.8329630802263397e-05, |
|
"loss": 0.8944, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.1003855463170757, |
|
"learning_rate": 1.8323349876135836e-05, |
|
"loss": 0.9236, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9882424235256009, |
|
"learning_rate": 1.8317058243735312e-05, |
|
"loss": 0.8451, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.963270254631444, |
|
"learning_rate": 1.8310755913154726e-05, |
|
"loss": 0.9003, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.070834999042204, |
|
"learning_rate": 1.8304442892500716e-05, |
|
"loss": 0.8523, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9218568344531317, |
|
"learning_rate": 1.8298119189893686e-05, |
|
"loss": 0.814, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0121387565853985, |
|
"learning_rate": 1.8291784813467775e-05, |
|
"loss": 0.8424, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.6096637215697438, |
|
"learning_rate": 1.828543977137085e-05, |
|
"loss": 0.9638, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0093617823228238, |
|
"learning_rate": 1.82790840717645e-05, |
|
"loss": 0.8431, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0192552554698213, |
|
"learning_rate": 1.8272717722824024e-05, |
|
"loss": 0.8596, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.3776610014995394, |
|
"learning_rate": 1.826634073273841e-05, |
|
"loss": 1.0204, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0675434029261415, |
|
"learning_rate": 1.825995310971035e-05, |
|
"loss": 0.8003, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9842725904044134, |
|
"learning_rate": 1.8253554861956196e-05, |
|
"loss": 0.8682, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0144622853869512, |
|
"learning_rate": 1.8247145997705977e-05, |
|
"loss": 0.8425, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9797681977185918, |
|
"learning_rate": 1.8240726525203373e-05, |
|
"loss": 0.7792, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0253288961176894, |
|
"learning_rate": 1.8234296452705715e-05, |
|
"loss": 0.8911, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9908479790600693, |
|
"learning_rate": 1.8227855788483966e-05, |
|
"loss": 0.7824, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0055635922446198, |
|
"learning_rate": 1.8221404540822706e-05, |
|
"loss": 0.8183, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0186612448588224, |
|
"learning_rate": 1.8214942718020138e-05, |
|
"loss": 0.8881, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1193248624591543, |
|
"learning_rate": 1.8208470328388065e-05, |
|
"loss": 0.9409, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0941512225566314, |
|
"learning_rate": 1.820198738025188e-05, |
|
"loss": 0.7906, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1150873133380623, |
|
"learning_rate": 1.8195493881950566e-05, |
|
"loss": 0.8611, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9378843998364783, |
|
"learning_rate": 1.8188989841836656e-05, |
|
"loss": 0.773, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9753210492248877, |
|
"learning_rate": 1.8182475268276265e-05, |
|
"loss": 0.8462, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0828463958224648, |
|
"learning_rate": 1.8175950169649048e-05, |
|
"loss": 0.8576, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9769233721699588, |
|
"learning_rate": 1.8169414554348194e-05, |
|
"loss": 0.7836, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0461794610793929, |
|
"learning_rate": 1.8162868430780426e-05, |
|
"loss": 0.8385, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.003015728656599, |
|
"learning_rate": 1.8156311807365985e-05, |
|
"loss": 0.8552, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0186870649283732, |
|
"learning_rate": 1.814974469253861e-05, |
|
"loss": 0.8379, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0451464039674105, |
|
"learning_rate": 1.8143167094745544e-05, |
|
"loss": 0.9206, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9836969096682053, |
|
"learning_rate": 1.8136579022447506e-05, |
|
"loss": 0.8315, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1918878087256983, |
|
"learning_rate": 1.81299804841187e-05, |
|
"loss": 0.7708, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.13014118954486, |
|
"learning_rate": 1.8123371488246776e-05, |
|
"loss": 0.8264, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.095613581352552, |
|
"learning_rate": 1.8116752043332848e-05, |
|
"loss": 0.8275, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.001887759694024, |
|
"learning_rate": 1.8110122157891466e-05, |
|
"loss": 0.8993, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.0591263293529547, |
|
"learning_rate": 1.810348184045061e-05, |
|
"loss": 0.8239, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9867565259786909, |
|
"learning_rate": 1.8096831099551675e-05, |
|
"loss": 0.8347, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9573471269079379, |
|
"learning_rate": 1.8090169943749477e-05, |
|
"loss": 0.8389, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0815591626860135, |
|
"learning_rate": 1.808349838161221e-05, |
|
"loss": 0.8124, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0660151285755348, |
|
"learning_rate": 1.807681642172147e-05, |
|
"loss": 0.9354, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9862775055482695, |
|
"learning_rate": 1.8070124072672212e-05, |
|
"loss": 0.8433, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9895614964471511, |
|
"learning_rate": 1.806342134307277e-05, |
|
"loss": 0.8612, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9589334452418762, |
|
"learning_rate": 1.805670824154482e-05, |
|
"loss": 0.8296, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9501049834625723, |
|
"learning_rate": 1.8049984776723383e-05, |
|
"loss": 0.8283, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1151775277350218, |
|
"learning_rate": 1.804325095725681e-05, |
|
"loss": 0.795, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0790263646019147, |
|
"learning_rate": 1.803650679180677e-05, |
|
"loss": 0.9071, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9658054663092771, |
|
"learning_rate": 1.8029752289048245e-05, |
|
"loss": 0.7951, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.06270294763586, |
|
"learning_rate": 1.8022987457669504e-05, |
|
"loss": 0.8664, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.039694994665679, |
|
"learning_rate": 1.801621230637211e-05, |
|
"loss": 0.8906, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.9165308822871485, |
|
"learning_rate": 1.8009426843870897e-05, |
|
"loss": 0.8651, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.106059285409371, |
|
"learning_rate": 1.8002631078893964e-05, |
|
"loss": 0.8719, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9388909424266827, |
|
"learning_rate": 1.7995825020182664e-05, |
|
"loss": 0.8676, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9428941948815046, |
|
"learning_rate": 1.7989008676491576e-05, |
|
"loss": 0.7718, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9435866134778035, |
|
"learning_rate": 1.7982182056588536e-05, |
|
"loss": 0.9058, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9484400640960485, |
|
"learning_rate": 1.7975345169254574e-05, |
|
"loss": 0.8145, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0135613748858736, |
|
"learning_rate": 1.7968498023283937e-05, |
|
"loss": 0.8617, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9907037798755128, |
|
"learning_rate": 1.7961640627484064e-05, |
|
"loss": 0.8254, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0539851003236405, |
|
"learning_rate": 1.7954772990675583e-05, |
|
"loss": 0.8105, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 2.5109070638813087, |
|
"learning_rate": 1.7947895121692293e-05, |
|
"loss": 0.8989, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0570034476843884, |
|
"learning_rate": 1.794100702938115e-05, |
|
"loss": 0.8437, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0062280180181036, |
|
"learning_rate": 1.7934108722602266e-05, |
|
"loss": 0.8234, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0254412892858618, |
|
"learning_rate": 1.7927200210228895e-05, |
|
"loss": 0.8789, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.009355678226713, |
|
"learning_rate": 1.7920281501147406e-05, |
|
"loss": 0.8942, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9707762185300899, |
|
"learning_rate": 1.791335260425729e-05, |
|
"loss": 0.8253, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0507998746245961, |
|
"learning_rate": 1.7906413528471155e-05, |
|
"loss": 0.8184, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.014499078320948, |
|
"learning_rate": 1.7899464282714683e-05, |
|
"loss": 0.8316, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.2690613471208891, |
|
"learning_rate": 1.7892504875926643e-05, |
|
"loss": 0.8476, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.015777608061474, |
|
"learning_rate": 1.7885535317058882e-05, |
|
"loss": 0.8758, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0097208382065235, |
|
"learning_rate": 1.7878555615076294e-05, |
|
"loss": 0.7964, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0403303018421468, |
|
"learning_rate": 1.7871565778956836e-05, |
|
"loss": 0.8279, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0185895791071147, |
|
"learning_rate": 1.7864565817691478e-05, |
|
"loss": 0.9441, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.219088999355827, |
|
"learning_rate": 1.7857555740284234e-05, |
|
"loss": 0.9177, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.021606981348204, |
|
"learning_rate": 1.7850535555752127e-05, |
|
"loss": 0.8538, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.2146520398195872, |
|
"learning_rate": 1.7843505273125164e-05, |
|
"loss": 0.9698, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9344553257619521, |
|
"learning_rate": 1.7836464901446363e-05, |
|
"loss": 0.8474, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0275115933940366, |
|
"learning_rate": 1.782941444977171e-05, |
|
"loss": 0.7603, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0505661615764563, |
|
"learning_rate": 1.7822353927170154e-05, |
|
"loss": 0.8694, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.090766269758975, |
|
"learning_rate": 1.78152833427236e-05, |
|
"loss": 0.8273, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0174580986492128, |
|
"learning_rate": 1.78082027055269e-05, |
|
"loss": 0.7529, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0021861850530214, |
|
"learning_rate": 1.780111202468783e-05, |
|
"loss": 0.9338, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0609059318746341, |
|
"learning_rate": 1.7794011309327094e-05, |
|
"loss": 0.8543, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9551542638515328, |
|
"learning_rate": 1.778690056857829e-05, |
|
"loss": 0.8321, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.1289162721981014, |
|
"learning_rate": 1.7779779811587927e-05, |
|
"loss": 0.8622, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0578729963198346, |
|
"learning_rate": 1.7772649047515384e-05, |
|
"loss": 0.7849, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"eval_loss": 0.8381435871124268, |
|
"eval_runtime": 187.1931, |
|
"eval_samples_per_second": 216.098, |
|
"eval_steps_per_second": 27.015, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9279732332051667, |
|
"learning_rate": 1.7765508285532923e-05, |
|
"loss": 0.7979, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0143917337734483, |
|
"learning_rate": 1.7758357534825658e-05, |
|
"loss": 0.8383, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0124719670852789, |
|
"learning_rate": 1.7751196804591554e-05, |
|
"loss": 0.9105, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0404174429801545, |
|
"learning_rate": 1.7744026104041422e-05, |
|
"loss": 0.8057, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0203141485006675, |
|
"learning_rate": 1.7736845442398884e-05, |
|
"loss": 0.8641, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0338735850682625, |
|
"learning_rate": 1.7729654828900372e-05, |
|
"loss": 0.7828, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9554435548551307, |
|
"learning_rate": 1.7722454272795142e-05, |
|
"loss": 0.7999, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0675538600899108, |
|
"learning_rate": 1.7715243783345215e-05, |
|
"loss": 0.8248, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0334356374016562, |
|
"learning_rate": 1.77080233698254e-05, |
|
"loss": 0.9482, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9625543396191655, |
|
"learning_rate": 1.7700793041523272e-05, |
|
"loss": 0.807, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1920966108340543, |
|
"learning_rate": 1.7693552807739155e-05, |
|
"loss": 1.0113, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1397337490346742, |
|
"learning_rate": 1.7686302677786118e-05, |
|
"loss": 0.8753, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0365348300829782, |
|
"learning_rate": 1.7679042660989956e-05, |
|
"loss": 0.8879, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0343554642761457, |
|
"learning_rate": 1.7671772766689185e-05, |
|
"loss": 0.8311, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0824083716448243, |
|
"learning_rate": 1.7664493004235023e-05, |
|
"loss": 1.0056, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0099315138119533, |
|
"learning_rate": 1.7657203382991384e-05, |
|
"loss": 0.8656, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.976404160597241, |
|
"learning_rate": 1.7649903912334864e-05, |
|
"loss": 0.7945, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.2084848684695084, |
|
"learning_rate": 1.7642594601654726e-05, |
|
"loss": 0.8635, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9971992358419232, |
|
"learning_rate": 1.7635275460352885e-05, |
|
"loss": 0.8541, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0726438623468066, |
|
"learning_rate": 1.7627946497843917e-05, |
|
"loss": 1.0002, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0289865886566527, |
|
"learning_rate": 1.762060772355501e-05, |
|
"loss": 0.795, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.973501695572963, |
|
"learning_rate": 1.761325914692599e-05, |
|
"loss": 0.7983, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9356583400580673, |
|
"learning_rate": 1.7605900777409282e-05, |
|
"loss": 0.8379, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9969102865233515, |
|
"learning_rate": 1.7598532624469916e-05, |
|
"loss": 0.8237, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9193759607412801, |
|
"learning_rate": 1.7591154697585496e-05, |
|
"loss": 0.7915, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0241626821655354, |
|
"learning_rate": 1.7583767006246203e-05, |
|
"loss": 0.8324, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9535841484168424, |
|
"learning_rate": 1.757636955995478e-05, |
|
"loss": 0.82, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0646472607324255, |
|
"learning_rate": 1.7568962368226517e-05, |
|
"loss": 0.8231, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.02174942247018, |
|
"learning_rate": 1.7561545440589237e-05, |
|
"loss": 0.9426, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.931456362698152, |
|
"learning_rate": 1.755411878658329e-05, |
|
"loss": 0.7884, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.084058916314809, |
|
"learning_rate": 1.754668241576153e-05, |
|
"loss": 0.8664, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.996047425069412, |
|
"learning_rate": 1.7539236337689317e-05, |
|
"loss": 0.8128, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9554163336960624, |
|
"learning_rate": 1.75317805619445e-05, |
|
"loss": 0.8238, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9330046866060917, |
|
"learning_rate": 1.7524315098117385e-05, |
|
"loss": 0.8176, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9760340454661153, |
|
"learning_rate": 1.751683995581076e-05, |
|
"loss": 0.7941, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9449691883644274, |
|
"learning_rate": 1.7509355144639853e-05, |
|
"loss": 0.8796, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8772646626675931, |
|
"learning_rate": 1.7501860674232327e-05, |
|
"loss": 0.7869, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9696546113821329, |
|
"learning_rate": 1.7494356554228277e-05, |
|
"loss": 0.8105, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9528515867071141, |
|
"learning_rate": 1.7486842794280198e-05, |
|
"loss": 0.8852, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.962161515570676, |
|
"learning_rate": 1.7479319404053004e-05, |
|
"loss": 0.8502, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9023054126262993, |
|
"learning_rate": 1.7471786393223973e-05, |
|
"loss": 0.8616, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0941424697568807, |
|
"learning_rate": 1.7464243771482778e-05, |
|
"loss": 0.8815, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9951238405318317, |
|
"learning_rate": 1.7456691548531442e-05, |
|
"loss": 0.8328, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9178551178716932, |
|
"learning_rate": 1.744912973408434e-05, |
|
"loss": 0.8078, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9496311013052559, |
|
"learning_rate": 1.74415583378682e-05, |
|
"loss": 0.8138, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9235589923434312, |
|
"learning_rate": 1.7433977369622044e-05, |
|
"loss": 0.8794, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9272481982717993, |
|
"learning_rate": 1.7426386839097233e-05, |
|
"loss": 0.8177, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0040310234682746, |
|
"learning_rate": 1.741878675605742e-05, |
|
"loss": 0.8455, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9249804834714497, |
|
"learning_rate": 1.741117713027854e-05, |
|
"loss": 0.798, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9297848707023268, |
|
"learning_rate": 1.740355797154881e-05, |
|
"loss": 0.8504, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9926542817126969, |
|
"learning_rate": 1.7395929289668707e-05, |
|
"loss": 0.858, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9213349718735717, |
|
"learning_rate": 1.7388291094450953e-05, |
|
"loss": 0.8135, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.981486353205901, |
|
"learning_rate": 1.7380643395720517e-05, |
|
"loss": 0.8525, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9204065547943485, |
|
"learning_rate": 1.7372986203314578e-05, |
|
"loss": 0.755, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9738300309612525, |
|
"learning_rate": 1.736531952708254e-05, |
|
"loss": 0.8743, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9452690514025663, |
|
"learning_rate": 1.7357643376886004e-05, |
|
"loss": 0.837, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9727629030140199, |
|
"learning_rate": 1.7349957762598745e-05, |
|
"loss": 0.8241, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9099279720365867, |
|
"learning_rate": 1.734226269410673e-05, |
|
"loss": 0.7765, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.018538174707947, |
|
"learning_rate": 1.733455818130807e-05, |
|
"loss": 0.8626, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9232631930298351, |
|
"learning_rate": 1.7326844234113037e-05, |
|
"loss": 0.8506, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9405169893976543, |
|
"learning_rate": 1.731912086244403e-05, |
|
"loss": 0.9013, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9350119648542045, |
|
"learning_rate": 1.7311388076235576e-05, |
|
"loss": 0.8907, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9474704446032522, |
|
"learning_rate": 1.7303645885434305e-05, |
|
"loss": 0.8539, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8704134009300898, |
|
"learning_rate": 1.7295894299998953e-05, |
|
"loss": 0.745, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9615910463364002, |
|
"learning_rate": 1.7288133329900337e-05, |
|
"loss": 0.8242, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9919439198316098, |
|
"learning_rate": 1.728036298512134e-05, |
|
"loss": 0.8495, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.877565931650257, |
|
"learning_rate": 1.727258327565691e-05, |
|
"loss": 0.8206, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9878950331460611, |
|
"learning_rate": 1.7264794211514042e-05, |
|
"loss": 0.7938, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9787049714101482, |
|
"learning_rate": 1.725699580271176e-05, |
|
"loss": 0.7985, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9885640942295232, |
|
"learning_rate": 1.72491880592811e-05, |
|
"loss": 0.8215, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9808580950113753, |
|
"learning_rate": 1.724137099126512e-05, |
|
"loss": 0.8952, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0343691086853437, |
|
"learning_rate": 1.7233544608718866e-05, |
|
"loss": 0.9449, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0211962127955467, |
|
"learning_rate": 1.7225708921709363e-05, |
|
"loss": 0.9369, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0294479359588957, |
|
"learning_rate": 1.721786394031561e-05, |
|
"loss": 0.8511, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9820784552509588, |
|
"learning_rate": 1.7210009674628553e-05, |
|
"loss": 0.7821, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.1866701805556474, |
|
"learning_rate": 1.720214613475109e-05, |
|
"loss": 0.9516, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9445534415444331, |
|
"learning_rate": 1.719427333079804e-05, |
|
"loss": 0.879, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0884563423631934, |
|
"learning_rate": 1.718639127289614e-05, |
|
"loss": 0.854, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.3147407731744265, |
|
"learning_rate": 1.7178499971184032e-05, |
|
"loss": 0.8875, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9689204655821562, |
|
"learning_rate": 1.7170599435812253e-05, |
|
"loss": 0.8946, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.047014670498161, |
|
"learning_rate": 1.7162689676943206e-05, |
|
"loss": 0.8802, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0306720035023686, |
|
"learning_rate": 1.7154770704751173e-05, |
|
"loss": 0.8289, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9451205937730558, |
|
"learning_rate": 1.7146842529422268e-05, |
|
"loss": 0.8247, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.151775737564422, |
|
"learning_rate": 1.713890516115446e-05, |
|
"loss": 0.8381, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8978775908811445, |
|
"learning_rate": 1.7130958610157535e-05, |
|
"loss": 0.8475, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9936108721694572, |
|
"learning_rate": 1.712300288665309e-05, |
|
"loss": 0.8586, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0384341707306826, |
|
"learning_rate": 1.7115038000874526e-05, |
|
"loss": 0.9073, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0698844445150597, |
|
"learning_rate": 1.7107063963067025e-05, |
|
"loss": 0.8908, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.2170675037604572, |
|
"learning_rate": 1.7099080783487535e-05, |
|
"loss": 0.8864, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9251427056637105, |
|
"learning_rate": 1.709108847240478e-05, |
|
"loss": 0.7828, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0492859628400695, |
|
"learning_rate": 1.7083087040099218e-05, |
|
"loss": 0.8347, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9566010306136947, |
|
"learning_rate": 1.7075076496863035e-05, |
|
"loss": 0.785, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0395573578976345, |
|
"learning_rate": 1.7067056853000146e-05, |
|
"loss": 0.8052, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0356951969375556, |
|
"learning_rate": 1.7059028118826173e-05, |
|
"loss": 0.7623, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8986456959574445, |
|
"learning_rate": 1.7050990304668423e-05, |
|
"loss": 0.8003, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.963288211867447, |
|
"learning_rate": 1.7042943420865884e-05, |
|
"loss": 0.8653, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.019609500131109, |
|
"learning_rate": 1.7034887477769215e-05, |
|
"loss": 0.7268, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.939994639028726, |
|
"learning_rate": 1.7026822485740725e-05, |
|
"loss": 0.7512, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0241026823320927, |
|
"learning_rate": 1.7018748455154365e-05, |
|
"loss": 0.8455, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.100237354106455, |
|
"learning_rate": 1.7010665396395706e-05, |
|
"loss": 0.8381, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 0.8343697190284729, |
|
"eval_runtime": 187.1773, |
|
"eval_samples_per_second": 216.116, |
|
"eval_steps_per_second": 27.017, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9781956877736848, |
|
"learning_rate": 1.700257331986194e-05, |
|
"loss": 0.8316, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.1589816575272422, |
|
"learning_rate": 1.699447223596185e-05, |
|
"loss": 0.9097, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9441271210063006, |
|
"learning_rate": 1.6986362155115812e-05, |
|
"loss": 0.7765, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.8544844261366595, |
|
"learning_rate": 1.6978243087755775e-05, |
|
"loss": 0.8429, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9275645374138108, |
|
"learning_rate": 1.697011504432524e-05, |
|
"loss": 0.7922, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9188009246134373, |
|
"learning_rate": 1.6961978035279262e-05, |
|
"loss": 0.7762, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.969499301955575, |
|
"learning_rate": 1.6953832071084422e-05, |
|
"loss": 0.7442, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.980154184825822, |
|
"learning_rate": 1.6945677162218824e-05, |
|
"loss": 0.7951, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.0230622276939902, |
|
"learning_rate": 1.6937513319172078e-05, |
|
"loss": 0.8171, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8991431113287323, |
|
"learning_rate": 1.6929340552445283e-05, |
|
"loss": 0.8356, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0510503515631233, |
|
"learning_rate": 1.692115887255102e-05, |
|
"loss": 0.827, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9178701479375049, |
|
"learning_rate": 1.6912968290013325e-05, |
|
"loss": 0.7745, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9360018185950565, |
|
"learning_rate": 1.69047688153677e-05, |
|
"loss": 0.7774, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.316296501512368, |
|
"learning_rate": 1.689656045916108e-05, |
|
"loss": 0.8717, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9527242777681525, |
|
"learning_rate": 1.6888343231951817e-05, |
|
"loss": 0.8446, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9559347218205472, |
|
"learning_rate": 1.6880117144309682e-05, |
|
"loss": 0.7686, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9243473396928792, |
|
"learning_rate": 1.6871882206815838e-05, |
|
"loss": 0.843, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0471388055538455, |
|
"learning_rate": 1.6863638430062835e-05, |
|
"loss": 0.8021, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0681533849169953, |
|
"learning_rate": 1.6855385824654595e-05, |
|
"loss": 0.9617, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9907503662351598, |
|
"learning_rate": 1.6847124401206384e-05, |
|
"loss": 0.8398, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.1267861144590767, |
|
"learning_rate": 1.6838854170344827e-05, |
|
"loss": 0.9212, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9371696927779393, |
|
"learning_rate": 1.6830575142707866e-05, |
|
"loss": 0.8774, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.963308008855645, |
|
"learning_rate": 1.6822287328944765e-05, |
|
"loss": 0.8295, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9109515253684652, |
|
"learning_rate": 1.681399073971609e-05, |
|
"loss": 0.7968, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9512690574893296, |
|
"learning_rate": 1.6805685385693684e-05, |
|
"loss": 0.7919, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9277861987245727, |
|
"learning_rate": 1.679737127756068e-05, |
|
"loss": 0.8529, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9786371869517354, |
|
"learning_rate": 1.6789048426011456e-05, |
|
"loss": 0.8313, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8669153191576762, |
|
"learning_rate": 1.678071684175165e-05, |
|
"loss": 0.7609, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9367989371618103, |
|
"learning_rate": 1.6772376535498122e-05, |
|
"loss": 0.9188, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9319488948306222, |
|
"learning_rate": 1.676402751797896e-05, |
|
"loss": 0.8388, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9060314101494492, |
|
"learning_rate": 1.6755669799933455e-05, |
|
"loss": 0.8088, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9133531832761467, |
|
"learning_rate": 1.674730339211208e-05, |
|
"loss": 0.8333, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.8858572520830194, |
|
"learning_rate": 1.6738928305276498e-05, |
|
"loss": 0.7837, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9748551828519176, |
|
"learning_rate": 1.673054455019953e-05, |
|
"loss": 0.8224, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9837368048123739, |
|
"learning_rate": 1.6722152137665147e-05, |
|
"loss": 0.7404, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9613075813336643, |
|
"learning_rate": 1.671375107846846e-05, |
|
"loss": 0.8452, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9022543493122955, |
|
"learning_rate": 1.6705341383415694e-05, |
|
"loss": 0.7479, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9381842639325487, |
|
"learning_rate": 1.6696923063324192e-05, |
|
"loss": 0.8381, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.0225858550563163, |
|
"learning_rate": 1.668849612902238e-05, |
|
"loss": 0.8341, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9251227060209148, |
|
"learning_rate": 1.6680060591349774e-05, |
|
"loss": 0.8935, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9775603659563589, |
|
"learning_rate": 1.6671616461156963e-05, |
|
"loss": 0.9297, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.1795119299407328, |
|
"learning_rate": 1.6663163749305568e-05, |
|
"loss": 0.8387, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0465229174302006, |
|
"learning_rate": 1.665470246666826e-05, |
|
"loss": 0.8438, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.98481295597245, |
|
"learning_rate": 1.6646232624128737e-05, |
|
"loss": 0.8428, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8961490284253136, |
|
"learning_rate": 1.66377542325817e-05, |
|
"loss": 0.7532, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9818574745716003, |
|
"learning_rate": 1.662926730293286e-05, |
|
"loss": 0.8756, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.1230544122671045, |
|
"learning_rate": 1.6620771846098893e-05, |
|
"loss": 0.9136, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0068717380378494, |
|
"learning_rate": 1.6612267873007456e-05, |
|
"loss": 0.9546, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.96385028656652, |
|
"learning_rate": 1.6603755394597155e-05, |
|
"loss": 0.9825, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9111777420506347, |
|
"learning_rate": 1.659523442181754e-05, |
|
"loss": 0.7414, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9688593070694009, |
|
"learning_rate": 1.6586704965629082e-05, |
|
"loss": 0.8562, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9412426968470645, |
|
"learning_rate": 1.6578167037003173e-05, |
|
"loss": 0.8365, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9955700458307191, |
|
"learning_rate": 1.656962064692209e-05, |
|
"loss": 0.8033, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9691385337948485, |
|
"learning_rate": 1.6561065806379005e-05, |
|
"loss": 0.8894, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.272726220321745, |
|
"learning_rate": 1.6552502526377952e-05, |
|
"loss": 0.9017, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.1010411361156522, |
|
"learning_rate": 1.6543930817933833e-05, |
|
"loss": 0.9577, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9585360635545656, |
|
"learning_rate": 1.6535350692072377e-05, |
|
"loss": 0.7982, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9322358583855594, |
|
"learning_rate": 1.6526762159830145e-05, |
|
"loss": 0.9181, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9944886470410574, |
|
"learning_rate": 1.6518165232254515e-05, |
|
"loss": 0.8202, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0730618597851427, |
|
"learning_rate": 1.6509559920403663e-05, |
|
"loss": 0.8973, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.971148988153191, |
|
"learning_rate": 1.6500946235346545e-05, |
|
"loss": 0.8835, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.1248576590356607, |
|
"learning_rate": 1.649232418816289e-05, |
|
"loss": 0.8985, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9562539651810333, |
|
"learning_rate": 1.6483693789943184e-05, |
|
"loss": 0.8613, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9405416826009309, |
|
"learning_rate": 1.6475055051788656e-05, |
|
"loss": 0.7449, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9773248641215648, |
|
"learning_rate": 1.646640798481126e-05, |
|
"loss": 0.7838, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9352806966087698, |
|
"learning_rate": 1.645775260013366e-05, |
|
"loss": 0.9275, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.3131597709452303, |
|
"learning_rate": 1.644908890888923e-05, |
|
"loss": 0.829, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0087536169626201, |
|
"learning_rate": 1.644041692222202e-05, |
|
"loss": 0.8893, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9344144260658866, |
|
"learning_rate": 1.6431736651286753e-05, |
|
"loss": 0.801, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0242357186289224, |
|
"learning_rate": 1.64230481072488e-05, |
|
"loss": 0.8546, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0407673296307454, |
|
"learning_rate": 1.6414351301284194e-05, |
|
"loss": 0.8561, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.3359537210790293, |
|
"learning_rate": 1.640564624457957e-05, |
|
"loss": 0.7949, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9652079053552713, |
|
"learning_rate": 1.6396932948332192e-05, |
|
"loss": 0.8802, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9971827000934618, |
|
"learning_rate": 1.6388211423749926e-05, |
|
"loss": 0.8079, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9873924164513037, |
|
"learning_rate": 1.63794816820512e-05, |
|
"loss": 0.9431, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.035708221786434, |
|
"learning_rate": 1.6370743734465037e-05, |
|
"loss": 0.8462, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0659960068581555, |
|
"learning_rate": 1.6361997592231002e-05, |
|
"loss": 0.7802, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.95199115267153, |
|
"learning_rate": 1.6353243266599204e-05, |
|
"loss": 0.8402, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0352302934148574, |
|
"learning_rate": 1.6344480768830272e-05, |
|
"loss": 0.8664, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0727757180183821, |
|
"learning_rate": 1.633571011019536e-05, |
|
"loss": 0.8469, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0103695746994772, |
|
"learning_rate": 1.632693130197611e-05, |
|
"loss": 0.8474, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9279816172567278, |
|
"learning_rate": 1.6318144355464636e-05, |
|
"loss": 0.8251, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0177667753191524, |
|
"learning_rate": 1.6309349281963554e-05, |
|
"loss": 0.8037, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0205024180657574, |
|
"learning_rate": 1.6300546092785893e-05, |
|
"loss": 0.8937, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8978191926449389, |
|
"learning_rate": 1.629173479925515e-05, |
|
"loss": 0.7662, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9794809130650164, |
|
"learning_rate": 1.6282915412705237e-05, |
|
"loss": 0.778, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.1484736613051145, |
|
"learning_rate": 1.627408794448047e-05, |
|
"loss": 0.8501, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.2006586269251032, |
|
"learning_rate": 1.6265252405935567e-05, |
|
"loss": 0.8275, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9565918259072778, |
|
"learning_rate": 1.6256408808435632e-05, |
|
"loss": 0.7764, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9230346491449858, |
|
"learning_rate": 1.6247557163356127e-05, |
|
"loss": 0.8537, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.1385277004351848, |
|
"learning_rate": 1.6238697482082865e-05, |
|
"loss": 0.8683, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9525638773784072, |
|
"learning_rate": 1.6229829776012e-05, |
|
"loss": 0.8534, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9347976283055996, |
|
"learning_rate": 1.6220954056550007e-05, |
|
"loss": 0.7843, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0583805539492264, |
|
"learning_rate": 1.621207033511367e-05, |
|
"loss": 0.8629, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0048885232922549, |
|
"learning_rate": 1.620317862313006e-05, |
|
"loss": 0.9306, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9232667435607793, |
|
"learning_rate": 1.6194278932036537e-05, |
|
"loss": 0.819, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8682759371657264, |
|
"learning_rate": 1.6185371273280716e-05, |
|
"loss": 0.7695, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.996774327536682, |
|
"learning_rate": 1.6176455658320458e-05, |
|
"loss": 0.8802, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9556030109023915, |
|
"learning_rate": 1.616753209862387e-05, |
|
"loss": 0.8849, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9688301385550866, |
|
"learning_rate": 1.6158600605669264e-05, |
|
"loss": 0.7652, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"eval_loss": 0.822993814945221, |
|
"eval_runtime": 187.3145, |
|
"eval_samples_per_second": 215.958, |
|
"eval_steps_per_second": 26.997, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9318473637952628, |
|
"learning_rate": 1.6149661190945166e-05, |
|
"loss": 0.8415, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9274136224074371, |
|
"learning_rate": 1.6140713865950295e-05, |
|
"loss": 0.8316, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9014362272472443, |
|
"learning_rate": 1.6131758642193532e-05, |
|
"loss": 0.8, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9216394797926182, |
|
"learning_rate": 1.6122795531193928e-05, |
|
"loss": 0.8132, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9725243755979751, |
|
"learning_rate": 1.6113824544480677e-05, |
|
"loss": 0.9002, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9882233530522131, |
|
"learning_rate": 1.61048456935931e-05, |
|
"loss": 0.8617, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9917887320236796, |
|
"learning_rate": 1.6095858990080645e-05, |
|
"loss": 0.8216, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.0406878445820225, |
|
"learning_rate": 1.608686444550285e-05, |
|
"loss": 0.9715, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.1217295732718597, |
|
"learning_rate": 1.607786207142933e-05, |
|
"loss": 0.8178, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.1019645894046524, |
|
"learning_rate": 1.606885187943979e-05, |
|
"loss": 0.9135, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9552174332315572, |
|
"learning_rate": 1.6059833881123992e-05, |
|
"loss": 0.8409, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9611142730552733, |
|
"learning_rate": 1.6050808088081724e-05, |
|
"loss": 0.9057, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.0288385302918484, |
|
"learning_rate": 1.6041774511922805e-05, |
|
"loss": 0.8507, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.886330987251663, |
|
"learning_rate": 1.6032733164267068e-05, |
|
"loss": 0.7642, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.951400710797225, |
|
"learning_rate": 1.602368405674435e-05, |
|
"loss": 0.8739, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9686000092526066, |
|
"learning_rate": 1.601462720099445e-05, |
|
"loss": 0.8591, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8930468558428761, |
|
"learning_rate": 1.600556260866715e-05, |
|
"loss": 0.8217, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9035192721578178, |
|
"learning_rate": 1.599649029142218e-05, |
|
"loss": 0.8376, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8646970486410438, |
|
"learning_rate": 1.5987410260929207e-05, |
|
"loss": 0.8474, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9248834234575523, |
|
"learning_rate": 1.597832252886781e-05, |
|
"loss": 0.8444, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9427172975191896, |
|
"learning_rate": 1.5969227106927484e-05, |
|
"loss": 0.826, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.4077800946655261, |
|
"learning_rate": 1.596012400680762e-05, |
|
"loss": 0.7879, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.837913547326813, |
|
"learning_rate": 1.5951013240217476e-05, |
|
"loss": 0.7558, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9175537997708259, |
|
"learning_rate": 1.5941894818876167e-05, |
|
"loss": 0.8003, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9341359649482986, |
|
"learning_rate": 1.593276875451267e-05, |
|
"loss": 0.9179, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8901143268702544, |
|
"learning_rate": 1.592363505886578e-05, |
|
"loss": 0.749, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.955997180286808, |
|
"learning_rate": 1.591449374368412e-05, |
|
"loss": 0.8353, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9247328217729289, |
|
"learning_rate": 1.5905344820726095e-05, |
|
"loss": 0.891, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9606738684735865, |
|
"learning_rate": 1.5896188301759915e-05, |
|
"loss": 0.7897, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9863303377461871, |
|
"learning_rate": 1.5887024198563552e-05, |
|
"loss": 0.8119, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9680881639267663, |
|
"learning_rate": 1.5877852522924733e-05, |
|
"loss": 0.8187, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9137190159154377, |
|
"learning_rate": 1.586867328664093e-05, |
|
"loss": 0.7274, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8914334634486136, |
|
"learning_rate": 1.5859486501519336e-05, |
|
"loss": 0.7296, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.918290906922166, |
|
"learning_rate": 1.5850292179376853e-05, |
|
"loss": 0.8544, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8983013443167193, |
|
"learning_rate": 1.584109033204008e-05, |
|
"loss": 0.8667, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.8783351660772043, |
|
"learning_rate": 1.58318809713453e-05, |
|
"loss": 0.8418, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.888234321129709, |
|
"learning_rate": 1.5822664109138455e-05, |
|
"loss": 0.8389, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.082130346683686, |
|
"learning_rate": 1.5813439757275136e-05, |
|
"loss": 0.8873, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9420960129507885, |
|
"learning_rate": 1.580420792762057e-05, |
|
"loss": 0.8385, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8962298028357553, |
|
"learning_rate": 1.5794968632049598e-05, |
|
"loss": 0.8424, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9622507092912542, |
|
"learning_rate": 1.5785721882446674e-05, |
|
"loss": 0.9024, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.924531664156692, |
|
"learning_rate": 1.577646769070583e-05, |
|
"loss": 0.8497, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8907729853195185, |
|
"learning_rate": 1.576720606873068e-05, |
|
"loss": 0.7535, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.922725222248796, |
|
"learning_rate": 1.5757937028434386e-05, |
|
"loss": 0.8141, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0247896956566702, |
|
"learning_rate": 1.5748660581739658e-05, |
|
"loss": 0.8645, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9683488898803148, |
|
"learning_rate": 1.5739376740578725e-05, |
|
"loss": 0.8602, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9250842111817158, |
|
"learning_rate": 1.573008551689335e-05, |
|
"loss": 0.7781, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0417883831148922, |
|
"learning_rate": 1.572078692263476e-05, |
|
"loss": 0.8281, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9067729621768262, |
|
"learning_rate": 1.5711480969763687e-05, |
|
"loss": 0.8305, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9354864669464692, |
|
"learning_rate": 1.570216767025032e-05, |
|
"loss": 0.7685, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9522717869161624, |
|
"learning_rate": 1.569284703607429e-05, |
|
"loss": 0.8501, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9356497506936815, |
|
"learning_rate": 1.568351907922468e-05, |
|
"loss": 0.7932, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.991045124767421, |
|
"learning_rate": 1.5674183811699977e-05, |
|
"loss": 0.7607, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9425625884087384, |
|
"learning_rate": 1.5664841245508083e-05, |
|
"loss": 0.8086, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9457733257889495, |
|
"learning_rate": 1.5655491392666278e-05, |
|
"loss": 0.8644, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.870259271198405, |
|
"learning_rate": 1.564613426520122e-05, |
|
"loss": 0.7676, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9439558057808078, |
|
"learning_rate": 1.5636769875148923e-05, |
|
"loss": 0.8501, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9299410524152394, |
|
"learning_rate": 1.5627398234554743e-05, |
|
"loss": 0.7952, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.97156518556245, |
|
"learning_rate": 1.5618019355473365e-05, |
|
"loss": 0.8358, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9100662530892337, |
|
"learning_rate": 1.5608633249968783e-05, |
|
"loss": 0.7722, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0511679090288089, |
|
"learning_rate": 1.5599239930114284e-05, |
|
"loss": 0.7877, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8761142337595833, |
|
"learning_rate": 1.5589839407992438e-05, |
|
"loss": 0.7341, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8990059427825337, |
|
"learning_rate": 1.5580431695695074e-05, |
|
"loss": 0.8201, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.0026432046525355, |
|
"learning_rate": 1.5571016805323277e-05, |
|
"loss": 0.867, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9429348388707998, |
|
"learning_rate": 1.5561594748987366e-05, |
|
"loss": 0.9206, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9167946059522355, |
|
"learning_rate": 1.5552165538806864e-05, |
|
"loss": 0.7893, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.1871295768545387, |
|
"learning_rate": 1.554272918691051e-05, |
|
"loss": 0.8828, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9679415215947258, |
|
"learning_rate": 1.5533285705436223e-05, |
|
"loss": 0.8616, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9402913050429529, |
|
"learning_rate": 1.55238351065311e-05, |
|
"loss": 0.8481, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0003446194946921, |
|
"learning_rate": 1.5514377402351376e-05, |
|
"loss": 0.8663, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8965443469538628, |
|
"learning_rate": 1.550491260506245e-05, |
|
"loss": 0.798, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9302594905643129, |
|
"learning_rate": 1.5495440726838823e-05, |
|
"loss": 0.8539, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.87788264580982, |
|
"learning_rate": 1.5485961779864113e-05, |
|
"loss": 0.8012, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8959993564405986, |
|
"learning_rate": 1.5476475776331038e-05, |
|
"loss": 0.8234, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.921332705440342, |
|
"learning_rate": 1.5466982728441372e-05, |
|
"loss": 0.7851, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9266184071321535, |
|
"learning_rate": 1.5457482648405974e-05, |
|
"loss": 0.8268, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.2058651665186106, |
|
"learning_rate": 1.5447975548444736e-05, |
|
"loss": 0.8695, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0440445222540657, |
|
"learning_rate": 1.5438461440786576e-05, |
|
"loss": 0.8452, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.1348865256068061, |
|
"learning_rate": 1.5428940337669437e-05, |
|
"loss": 0.8998, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9749698913377348, |
|
"learning_rate": 1.541941225134025e-05, |
|
"loss": 0.8256, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9220326956037203, |
|
"learning_rate": 1.5409877194054933e-05, |
|
"loss": 0.8253, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.0765304215328484, |
|
"learning_rate": 1.540033517807838e-05, |
|
"loss": 0.8896, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9206753023806792, |
|
"learning_rate": 1.5390786215684418e-05, |
|
"loss": 0.7448, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9708651392708145, |
|
"learning_rate": 1.5381230319155814e-05, |
|
"loss": 0.9519, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.218620357778404, |
|
"learning_rate": 1.5371667500784263e-05, |
|
"loss": 0.853, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.1911032016704852, |
|
"learning_rate": 1.5362097772870363e-05, |
|
"loss": 0.9497, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9398864147894423, |
|
"learning_rate": 1.5352521147723586e-05, |
|
"loss": 0.8944, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9661960223002711, |
|
"learning_rate": 1.5342937637662293e-05, |
|
"loss": 0.7934, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8855725014661222, |
|
"learning_rate": 1.5333347255013688e-05, |
|
"loss": 0.7527, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0787207219460402, |
|
"learning_rate": 1.532375001211383e-05, |
|
"loss": 0.8123, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.0384264021628293, |
|
"learning_rate": 1.5314145921307575e-05, |
|
"loss": 0.831, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8883493942021998, |
|
"learning_rate": 1.5304534994948615e-05, |
|
"loss": 0.8752, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9373183287986955, |
|
"learning_rate": 1.5294917245399432e-05, |
|
"loss": 0.8047, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.978544050648968, |
|
"learning_rate": 1.5285292685031264e-05, |
|
"loss": 0.7891, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9645365407413318, |
|
"learning_rate": 1.5275661326224128e-05, |
|
"loss": 0.7789, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9920781081825699, |
|
"learning_rate": 1.5266023181366785e-05, |
|
"loss": 0.808, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8737892505774815, |
|
"learning_rate": 1.5256378262856717e-05, |
|
"loss": 0.8315, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.946250945761465, |
|
"learning_rate": 1.5246726583100117e-05, |
|
"loss": 0.7663, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0186205498174532, |
|
"learning_rate": 1.5237068154511892e-05, |
|
"loss": 0.8631, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0311343742793326, |
|
"learning_rate": 1.5227402989515607e-05, |
|
"loss": 0.9006, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"eval_loss": 0.8166661858558655, |
|
"eval_runtime": 187.1666, |
|
"eval_samples_per_second": 216.128, |
|
"eval_steps_per_second": 27.019, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0366177745268226, |
|
"learning_rate": 1.5217731100543507e-05, |
|
"loss": 0.8218, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9506885195850883, |
|
"learning_rate": 1.5208052500036488e-05, |
|
"loss": 0.7495, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0338066376511579, |
|
"learning_rate": 1.5198367200444067e-05, |
|
"loss": 0.7877, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9322333474694569, |
|
"learning_rate": 1.518867521422439e-05, |
|
"loss": 0.7568, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9609534097048402, |
|
"learning_rate": 1.517897655384419e-05, |
|
"loss": 0.7611, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0325720072776678, |
|
"learning_rate": 1.5169271231778799e-05, |
|
"loss": 0.7689, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9418930343657563, |
|
"learning_rate": 1.5159559260512109e-05, |
|
"loss": 0.8998, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0235955508964831, |
|
"learning_rate": 1.5149840652536574e-05, |
|
"loss": 0.8997, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9414657504967248, |
|
"learning_rate": 1.5140115420353173e-05, |
|
"loss": 0.8484, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9245083032735134, |
|
"learning_rate": 1.5130383576471415e-05, |
|
"loss": 0.7856, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9161849096789572, |
|
"learning_rate": 1.5120645133409304e-05, |
|
"loss": 0.835, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8943557174659821, |
|
"learning_rate": 1.5110900103693346e-05, |
|
"loss": 0.7928, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0426274191582412, |
|
"learning_rate": 1.510114849985851e-05, |
|
"loss": 0.9187, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9941986293419836, |
|
"learning_rate": 1.509139033444822e-05, |
|
"loss": 0.7255, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.004374685633853, |
|
"learning_rate": 1.5081625620014344e-05, |
|
"loss": 0.8519, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8963139975673067, |
|
"learning_rate": 1.5071854369117178e-05, |
|
"loss": 0.8662, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.87070455564935, |
|
"learning_rate": 1.506207659432541e-05, |
|
"loss": 0.7996, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.278643651530359, |
|
"learning_rate": 1.5052292308216143e-05, |
|
"loss": 0.8961, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.885800971500993, |
|
"learning_rate": 1.5042501523374834e-05, |
|
"loss": 0.7309, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0241069842563644, |
|
"learning_rate": 1.5032704252395315e-05, |
|
"loss": 0.8767, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9138257247149999, |
|
"learning_rate": 1.5022900507879752e-05, |
|
"loss": 0.8001, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.941823978584876, |
|
"learning_rate": 1.501309030243864e-05, |
|
"loss": 0.8897, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9469953491831391, |
|
"learning_rate": 1.5003273648690786e-05, |
|
"loss": 0.9473, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9207848899089832, |
|
"learning_rate": 1.4993450559263285e-05, |
|
"loss": 0.8003, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8926744152182166, |
|
"learning_rate": 1.4983621046791524e-05, |
|
"loss": 0.8505, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9408245436209166, |
|
"learning_rate": 1.4973785123919138e-05, |
|
"loss": 0.8341, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8629284716812735, |
|
"learning_rate": 1.4963942803298015e-05, |
|
"loss": 0.7872, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8962927330501582, |
|
"learning_rate": 1.495409409758827e-05, |
|
"loss": 0.7843, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9258411057151813, |
|
"learning_rate": 1.494423901945823e-05, |
|
"loss": 0.7379, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9798123698284505, |
|
"learning_rate": 1.4934377581584425e-05, |
|
"loss": 0.7774, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9132130466817296, |
|
"learning_rate": 1.4924509796651557e-05, |
|
"loss": 0.7349, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9640135589319957, |
|
"learning_rate": 1.4914635677352495e-05, |
|
"loss": 0.7628, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9171028168227539, |
|
"learning_rate": 1.4904755236388261e-05, |
|
"loss": 0.7603, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9418343170159471, |
|
"learning_rate": 1.4894868486468004e-05, |
|
"loss": 0.8052, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9598503981723232, |
|
"learning_rate": 1.4884975440308989e-05, |
|
"loss": 0.8606, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.006904224011919, |
|
"learning_rate": 1.4875076110636574e-05, |
|
"loss": 0.8552, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9124164328650459, |
|
"learning_rate": 1.4865170510184213e-05, |
|
"loss": 0.7951, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9550560395782298, |
|
"learning_rate": 1.4855258651693409e-05, |
|
"loss": 0.8485, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9775924030336234, |
|
"learning_rate": 1.4845340547913729e-05, |
|
"loss": 0.807, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8750397902514163, |
|
"learning_rate": 1.4835416211602771e-05, |
|
"loss": 0.7876, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8987320057700271, |
|
"learning_rate": 1.4825485655526141e-05, |
|
"loss": 0.7378, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.1034171103710384, |
|
"learning_rate": 1.4815548892457454e-05, |
|
"loss": 0.7968, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 4.840081865612383, |
|
"learning_rate": 1.4805605935178304e-05, |
|
"loss": 0.8786, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.003518046889941, |
|
"learning_rate": 1.479565679647826e-05, |
|
"loss": 0.8088, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9524891013440205, |
|
"learning_rate": 1.4785701489154831e-05, |
|
"loss": 0.8254, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9247188686906767, |
|
"learning_rate": 1.477574002601347e-05, |
|
"loss": 0.7715, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8565668707747258, |
|
"learning_rate": 1.4765772419867546e-05, |
|
"loss": 0.6915, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8846787014674027, |
|
"learning_rate": 1.4755798683538326e-05, |
|
"loss": 0.8063, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9727973778751486, |
|
"learning_rate": 1.4745818829854961e-05, |
|
"loss": 0.8962, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9056258797643352, |
|
"learning_rate": 1.473583287165448e-05, |
|
"loss": 0.7706, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.943869945595465, |
|
"learning_rate": 1.472584082178176e-05, |
|
"loss": 0.8285, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9194718412835111, |
|
"learning_rate": 1.4715842693089509e-05, |
|
"loss": 0.8362, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9223521065577772, |
|
"learning_rate": 1.4705838498438257e-05, |
|
"loss": 0.9504, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8443051767638792, |
|
"learning_rate": 1.469582825069634e-05, |
|
"loss": 0.7688, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0946829760149586, |
|
"learning_rate": 1.4685811962739873e-05, |
|
"loss": 0.7208, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9671049669919355, |
|
"learning_rate": 1.467578964745275e-05, |
|
"loss": 0.7694, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.0050083456532306, |
|
"learning_rate": 1.4665761317726608e-05, |
|
"loss": 0.8395, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9401037736089004, |
|
"learning_rate": 1.4655726986460825e-05, |
|
"loss": 0.8505, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9271070392150399, |
|
"learning_rate": 1.4645686666562503e-05, |
|
"loss": 0.8382, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9161677661842693, |
|
"learning_rate": 1.463564037094644e-05, |
|
"loss": 0.7854, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8785511543929789, |
|
"learning_rate": 1.462558811253512e-05, |
|
"loss": 0.877, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.902800739075103, |
|
"learning_rate": 1.4615529904258705e-05, |
|
"loss": 0.7724, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.061019400757448, |
|
"learning_rate": 1.4605465759055006e-05, |
|
"loss": 0.8561, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9825523333215745, |
|
"learning_rate": 1.4595395689869464e-05, |
|
"loss": 0.8538, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9181139164136977, |
|
"learning_rate": 1.4585319709655153e-05, |
|
"loss": 0.845, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9735509559194173, |
|
"learning_rate": 1.457523783137274e-05, |
|
"loss": 0.8189, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9841500939652149, |
|
"learning_rate": 1.4565150067990483e-05, |
|
"loss": 0.8327, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8988091678342344, |
|
"learning_rate": 1.4555056432484207e-05, |
|
"loss": 0.7679, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.944014867434341, |
|
"learning_rate": 1.4544956937837298e-05, |
|
"loss": 0.8342, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.1056206144151361, |
|
"learning_rate": 1.4534851597040666e-05, |
|
"loss": 0.8465, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9660105954153827, |
|
"learning_rate": 1.452474042309275e-05, |
|
"loss": 0.8517, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0472463649236523, |
|
"learning_rate": 1.451462342899949e-05, |
|
"loss": 0.8548, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9762588730513893, |
|
"learning_rate": 1.4504500627774313e-05, |
|
"loss": 0.8829, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9489649675731793, |
|
"learning_rate": 1.4494372032438116e-05, |
|
"loss": 0.7979, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.990120487046462, |
|
"learning_rate": 1.4484237656019244e-05, |
|
"loss": 0.8817, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9075790189381905, |
|
"learning_rate": 1.447409751155349e-05, |
|
"loss": 0.7941, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8831467966088242, |
|
"learning_rate": 1.4463951612084048e-05, |
|
"loss": 0.8214, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.0125327855835362, |
|
"learning_rate": 1.445379997066153e-05, |
|
"loss": 0.7625, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9101408924011467, |
|
"learning_rate": 1.4443642600343932e-05, |
|
"loss": 0.7662, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9019559452891296, |
|
"learning_rate": 1.4433479514196615e-05, |
|
"loss": 0.7902, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9207943304282776, |
|
"learning_rate": 1.4423310725292288e-05, |
|
"loss": 0.875, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.929234724837283, |
|
"learning_rate": 1.4413136246711008e-05, |
|
"loss": 0.8082, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8907862985352285, |
|
"learning_rate": 1.4402956091540137e-05, |
|
"loss": 0.8322, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.1412941737168578, |
|
"learning_rate": 1.4392770272874344e-05, |
|
"loss": 0.9219, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9762146877555116, |
|
"learning_rate": 1.4382578803815592e-05, |
|
"loss": 0.8423, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9177873129682679, |
|
"learning_rate": 1.4372381697473098e-05, |
|
"loss": 0.8468, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9079263274942797, |
|
"learning_rate": 1.4362178966963335e-05, |
|
"loss": 0.85, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9099542658185245, |
|
"learning_rate": 1.435197062541002e-05, |
|
"loss": 0.8523, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9510183315678092, |
|
"learning_rate": 1.4341756685944069e-05, |
|
"loss": 0.84, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9423797325098769, |
|
"learning_rate": 1.4331537161703612e-05, |
|
"loss": 0.9125, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9666830124658934, |
|
"learning_rate": 1.4321312065833961e-05, |
|
"loss": 0.8334, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.050044401434885, |
|
"learning_rate": 1.4311081411487593e-05, |
|
"loss": 0.818, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.971299073600437, |
|
"learning_rate": 1.4300845211824133e-05, |
|
"loss": 0.7817, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.0622022924767303, |
|
"learning_rate": 1.4290603480010337e-05, |
|
"loss": 0.8204, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8940801885856055, |
|
"learning_rate": 1.428035622922009e-05, |
|
"loss": 0.8421, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9711244830016025, |
|
"learning_rate": 1.427010347263436e-05, |
|
"loss": 0.7441, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9108635630363782, |
|
"learning_rate": 1.4259845223441201e-05, |
|
"loss": 0.8546, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9957713741461913, |
|
"learning_rate": 1.4249581494835744e-05, |
|
"loss": 0.7939, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.947865938918299, |
|
"learning_rate": 1.4239312300020155e-05, |
|
"loss": 0.9002, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.955212444373132, |
|
"learning_rate": 1.422903765220363e-05, |
|
"loss": 0.8589, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"eval_loss": 0.8088098764419556, |
|
"eval_runtime": 187.3476, |
|
"eval_samples_per_second": 215.919, |
|
"eval_steps_per_second": 26.993, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8964662083850382, |
|
"learning_rate": 1.4218757564602395e-05, |
|
"loss": 0.8541, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8530014267313194, |
|
"learning_rate": 1.4208472050439653e-05, |
|
"loss": 0.7937, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9305448294362012, |
|
"learning_rate": 1.4198181122945596e-05, |
|
"loss": 0.8638, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0396841586497925, |
|
"learning_rate": 1.4187884795357385e-05, |
|
"loss": 0.9494, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9137449553212285, |
|
"learning_rate": 1.4177583080919124e-05, |
|
"loss": 0.8144, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0190854848529411, |
|
"learning_rate": 1.4167275992881835e-05, |
|
"loss": 0.928, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9353683263098286, |
|
"learning_rate": 1.415696354450347e-05, |
|
"loss": 0.7958, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9209680060709873, |
|
"learning_rate": 1.4146645749048861e-05, |
|
"loss": 0.8689, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.898167043125677, |
|
"learning_rate": 1.4136322619789729e-05, |
|
"loss": 0.882, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.0433577130243574, |
|
"learning_rate": 1.4125994170004644e-05, |
|
"loss": 0.8298, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8853306203175052, |
|
"learning_rate": 1.4115660412979033e-05, |
|
"loss": 0.8184, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9242770972057193, |
|
"learning_rate": 1.410532136200514e-05, |
|
"loss": 0.833, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8551822597963529, |
|
"learning_rate": 1.409497703038202e-05, |
|
"loss": 0.8164, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9074799292741689, |
|
"learning_rate": 1.4084627431415525e-05, |
|
"loss": 0.7843, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9030612446473509, |
|
"learning_rate": 1.407427257841828e-05, |
|
"loss": 0.8201, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8554120431464576, |
|
"learning_rate": 1.4063912484709663e-05, |
|
"loss": 0.7776, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9501159195331209, |
|
"learning_rate": 1.4053547163615803e-05, |
|
"loss": 0.8334, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8938629752691346, |
|
"learning_rate": 1.4043176628469545e-05, |
|
"loss": 0.8048, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.0428868146721986, |
|
"learning_rate": 1.4032800892610446e-05, |
|
"loss": 0.8879, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9516786058647616, |
|
"learning_rate": 1.402241996938475e-05, |
|
"loss": 0.8268, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9922315164671542, |
|
"learning_rate": 1.401203387214537e-05, |
|
"loss": 0.7855, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9060023281261937, |
|
"learning_rate": 1.4001642614251881e-05, |
|
"loss": 0.8324, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9601763856903994, |
|
"learning_rate": 1.3991246209070493e-05, |
|
"loss": 0.8077, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9577849169044219, |
|
"learning_rate": 1.3980844669974036e-05, |
|
"loss": 0.8355, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8960036099129043, |
|
"learning_rate": 1.3970438010341945e-05, |
|
"loss": 0.8218, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9461892853385439, |
|
"learning_rate": 1.3960026243560243e-05, |
|
"loss": 0.8302, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9086947040687626, |
|
"learning_rate": 1.3949609383021518e-05, |
|
"loss": 0.7887, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.0253390198732695, |
|
"learning_rate": 1.3939187442124915e-05, |
|
"loss": 0.7914, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.039432871340379, |
|
"learning_rate": 1.3928760434276108e-05, |
|
"loss": 0.8485, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8970385765648137, |
|
"learning_rate": 1.3918328372887295e-05, |
|
"loss": 0.8225, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8726470987900731, |
|
"learning_rate": 1.3907891271377174e-05, |
|
"loss": 0.8193, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.3511389366858273, |
|
"learning_rate": 1.3897449143170915e-05, |
|
"loss": 0.7433, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9003522292869762, |
|
"learning_rate": 1.3887002001700171e-05, |
|
"loss": 0.7925, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9604476147261559, |
|
"learning_rate": 1.3876549860403034e-05, |
|
"loss": 0.7373, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8931730019645792, |
|
"learning_rate": 1.3866092732724022e-05, |
|
"loss": 0.8209, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9533626103452278, |
|
"learning_rate": 1.3855630632114077e-05, |
|
"loss": 0.8119, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9210708748170129, |
|
"learning_rate": 1.3845163572030533e-05, |
|
"loss": 0.8493, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.88266502750559, |
|
"learning_rate": 1.3834691565937104e-05, |
|
"loss": 0.7441, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9714788778711877, |
|
"learning_rate": 1.3824214627303865e-05, |
|
"loss": 0.9199, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.93445787773671, |
|
"learning_rate": 1.381373276960724e-05, |
|
"loss": 0.8291, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9064124219434208, |
|
"learning_rate": 1.3803246006329975e-05, |
|
"loss": 0.7146, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8611879842958952, |
|
"learning_rate": 1.3792754350961126e-05, |
|
"loss": 0.7505, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8957401093812204, |
|
"learning_rate": 1.3782257816996047e-05, |
|
"loss": 0.8717, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9580044205886977, |
|
"learning_rate": 1.3771756417936363e-05, |
|
"loss": 0.7724, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8208381090776238, |
|
"learning_rate": 1.376125016728996e-05, |
|
"loss": 0.7815, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8539973246995288, |
|
"learning_rate": 1.3750739078570958e-05, |
|
"loss": 0.7544, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0014874732651948, |
|
"learning_rate": 1.3740223165299708e-05, |
|
"loss": 0.849, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9126804758407212, |
|
"learning_rate": 1.3729702441002763e-05, |
|
"loss": 0.8151, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9687131763677577, |
|
"learning_rate": 1.3719176919212865e-05, |
|
"loss": 0.9047, |
|
"step": 1149 |
|
}, |
|
    {
      "epoch": 0.8,
      "grad_norm": 0.969129888421451,
      "learning_rate": 1.3708646613468925e-05,
      "loss": 0.8256,
      "step": 1150
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9653248623855282,
      "learning_rate": 1.3698111537316013e-05,
      "loss": 0.8613,
      "step": 1151
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9477030668227989,
      "learning_rate": 1.3687571704305329e-05,
      "loss": 0.8741,
      "step": 1152
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9467403055169439,
      "learning_rate": 1.3677027127994195e-05,
      "loss": 0.8158,
      "step": 1153
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.8881445660059051,
      "learning_rate": 1.3666477821946036e-05,
      "loss": 0.7622,
      "step": 1154
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.9823672949148402,
      "learning_rate": 1.3655923799730355e-05,
      "loss": 0.8495,
      "step": 1155
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9247600537788654,
      "learning_rate": 1.3645365074922728e-05,
      "loss": 0.793,
      "step": 1156
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9356835238185092,
      "learning_rate": 1.3634801661104771e-05,
      "loss": 0.8562,
      "step": 1157
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8899179313721481,
      "learning_rate": 1.3624233571864143e-05,
      "loss": 0.8039,
      "step": 1158
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8907403674153812,
      "learning_rate": 1.361366082079451e-05,
      "loss": 0.7767,
      "step": 1159
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8609601412851831,
      "learning_rate": 1.3603083421495535e-05,
      "loss": 0.757,
      "step": 1160
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.854048536932743,
      "learning_rate": 1.3592501387572858e-05,
      "loss": 0.8074,
      "step": 1161
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8583038832209344,
      "learning_rate": 1.3581914732638088e-05,
      "loss": 0.7514,
      "step": 1162
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9102036881133101,
      "learning_rate": 1.3571323470308772e-05,
      "loss": 0.879,
      "step": 1163
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9892591027955606,
      "learning_rate": 1.3560727614208382e-05,
      "loss": 0.7995,
      "step": 1164
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8587977399936928,
      "learning_rate": 1.3550127177966306e-05,
      "loss": 0.7842,
      "step": 1165
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8595666226056676,
      "learning_rate": 1.3539522175217815e-05,
      "loss": 0.848,
      "step": 1166
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9459028126824949,
      "learning_rate": 1.3528912619604055e-05,
      "loss": 0.8037,
      "step": 1167
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9332031814722181,
      "learning_rate": 1.3518298524772037e-05,
      "loss": 0.7559,
      "step": 1168
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.9494114970027568,
      "learning_rate": 1.3507679904374605e-05,
      "loss": 0.8466,
      "step": 1169
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.8704421497086452,
      "learning_rate": 1.3497056772070417e-05,
      "loss": 0.8155,
      "step": 1170
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.918802130165072,
      "learning_rate": 1.3486429141523952e-05,
      "loss": 0.7543,
      "step": 1171
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9429001916650661,
      "learning_rate": 1.3475797026405455e-05,
      "loss": 0.8587,
      "step": 1172
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9468204443661803,
      "learning_rate": 1.3465160440390956e-05,
      "loss": 0.8496,
      "step": 1173
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9994607399005049,
      "learning_rate": 1.3454519397162224e-05,
      "loss": 0.8622,
      "step": 1174
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.1004175782584324,
      "learning_rate": 1.3443873910406773e-05,
      "loss": 0.9329,
      "step": 1175
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.0020293054378908,
      "learning_rate": 1.3433223993817821e-05,
      "loss": 0.8327,
      "step": 1176
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9863748418961857,
      "learning_rate": 1.3422569661094292e-05,
      "loss": 0.8051,
      "step": 1177
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9320271692137702,
      "learning_rate": 1.3411910925940785e-05,
      "loss": 0.79,
      "step": 1178
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9135413110414086,
      "learning_rate": 1.3401247802067565e-05,
      "loss": 0.884,
      "step": 1179
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.052077980452545,
      "learning_rate": 1.3390580303190541e-05,
      "loss": 0.8115,
      "step": 1180
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9349164080270702,
      "learning_rate": 1.3379908443031253e-05,
      "loss": 0.7915,
      "step": 1181
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.2324139315928029,
      "learning_rate": 1.3369232235316847e-05,
      "loss": 0.8856,
      "step": 1182
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9594440807814407,
      "learning_rate": 1.335855169378006e-05,
      "loss": 0.8418,
      "step": 1183
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9197962337492925,
      "learning_rate": 1.3347866832159206e-05,
      "loss": 0.783,
      "step": 1184
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9305460243082693,
      "learning_rate": 1.3337177664198162e-05,
      "loss": 0.782,
      "step": 1185
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9373693721632484,
      "learning_rate": 1.3326484203646333e-05,
      "loss": 0.8484,
      "step": 1186
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.916846515507665,
      "learning_rate": 1.3315786464258647e-05,
      "loss": 0.8364,
      "step": 1187
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.3099634206251927,
      "learning_rate": 1.3305084459795547e-05,
      "loss": 0.86,
      "step": 1188
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9211071839446942,
      "learning_rate": 1.3294378204022948e-05,
      "loss": 0.8358,
      "step": 1189
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9267300515820535,
      "learning_rate": 1.3283667710712245e-05,
      "loss": 0.7812,
      "step": 1190
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9469209502966243,
      "learning_rate": 1.3272952993640276e-05,
      "loss": 0.8247,
      "step": 1191
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.8804259262659825,
      "learning_rate": 1.3262234066589315e-05,
      "loss": 0.7344,
      "step": 1192
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9132281206728566,
      "learning_rate": 1.3251510943347049e-05,
      "loss": 0.7078,
      "step": 1193
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.9892610920445006,
      "learning_rate": 1.3240783637706566e-05,
      "loss": 0.8737,
      "step": 1194
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.1053484676015917,
      "learning_rate": 1.3230052163466337e-05,
      "loss": 0.789,
      "step": 1195
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.8906000246500937,
      "learning_rate": 1.321931653443018e-05,
      "loss": 0.8797,
      "step": 1196
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.8979403780314781,
      "learning_rate": 1.3208576764407277e-05,
      "loss": 0.7897,
      "step": 1197
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.123038434121297,
      "learning_rate": 1.3197832867212123e-05,
      "loss": 0.8288,
      "step": 1198
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8854053827854093,
      "learning_rate": 1.3187084856664524e-05,
      "loss": 0.7818,
      "step": 1199
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8361990472972701,
      "learning_rate": 1.3176332746589587e-05,
      "loss": 0.7635,
      "step": 1200
    },
    {
      "epoch": 0.84,
      "eval_loss": 0.8016307353973389,
      "eval_runtime": 187.26,
      "eval_samples_per_second": 216.02,
      "eval_steps_per_second": 27.005,
      "step": 1200
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.909024655853674,
      "learning_rate": 1.3165576550817674e-05,
      "loss": 0.8462,
      "step": 1201
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8313849727666298,
      "learning_rate": 1.3154816283184416e-05,
      "loss": 0.7439,
      "step": 1202
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9042935007013845,
      "learning_rate": 1.314405195753068e-05,
      "loss": 0.8269,
      "step": 1203
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9956573032713342,
      "learning_rate": 1.313328358770255e-05,
      "loss": 0.8411,
      "step": 1204
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8692638810097056,
      "learning_rate": 1.3122511187551313e-05,
      "loss": 0.7821,
      "step": 1205
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8861664890808165,
      "learning_rate": 1.3111734770933435e-05,
      "loss": 0.8507,
      "step": 1206
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.010454663093981,
      "learning_rate": 1.3100954351710561e-05,
      "loss": 0.87,
      "step": 1207
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9296535384437415,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.8421,
      "step": 1208
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.8936969609941104,
      "learning_rate": 1.307938156092209e-05,
      "loss": 0.8661,
      "step": 1209
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9227907157257895,
      "learning_rate": 1.3068589217105441e-05,
      "loss": 0.7934,
      "step": 1210
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9335664869611315,
      "learning_rate": 1.3057792926181648e-05,
      "loss": 0.7751,
      "step": 1211
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9019312696351173,
      "learning_rate": 1.3046992702037912e-05,
      "loss": 0.8335,
      "step": 1212
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8899699042126639,
      "learning_rate": 1.3036188558566498e-05,
      "loss": 0.7718,
      "step": 1213
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8694994902176645,
      "learning_rate": 1.3025380509664704e-05,
      "loss": 0.8224,
      "step": 1214
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8679155573621159,
      "learning_rate": 1.3014568569234855e-05,
      "loss": 0.793,
      "step": 1215
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8731683514157172,
      "learning_rate": 1.300375275118429e-05,
      "loss": 0.7145,
      "step": 1216
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9617633270628331,
      "learning_rate": 1.2992933069425322e-05,
      "loss": 0.8323,
      "step": 1217
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9205318690136027,
      "learning_rate": 1.2982109537875242e-05,
      "loss": 0.7919,
      "step": 1218
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8720538824550501,
      "learning_rate": 1.2971282170456287e-05,
      "loss": 0.739,
      "step": 1219
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9832029171959736,
      "learning_rate": 1.2960450981095643e-05,
      "loss": 0.8244,
      "step": 1220
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8770451456283266,
      "learning_rate": 1.2949615983725386e-05,
      "loss": 0.7719,
      "step": 1221
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8840598304118039,
      "learning_rate": 1.2938777192282516e-05,
      "loss": 0.8434,
      "step": 1222
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9055448234498211,
      "learning_rate": 1.2927934620708902e-05,
      "loss": 0.8374,
      "step": 1223
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8729195849353666,
      "learning_rate": 1.291708828295127e-05,
      "loss": 0.774,
      "step": 1224
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.9284381671456617,
      "learning_rate": 1.2906238192961201e-05,
      "loss": 0.9232,
      "step": 1225
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.8999733124576376,
      "learning_rate": 1.28953843646951e-05,
      "loss": 0.8288,
      "step": 1226
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9245147181563812,
      "learning_rate": 1.2884526812114176e-05,
      "loss": 0.8119,
      "step": 1227
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8530109921661233,
      "learning_rate": 1.2873665549184429e-05,
      "loss": 0.7753,
      "step": 1228
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9043600405219798,
      "learning_rate": 1.2862800589876636e-05,
      "loss": 0.8343,
      "step": 1229
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8830059417691293,
      "learning_rate": 1.2851931948166328e-05,
      "loss": 0.7843,
      "step": 1230
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9418973868303533,
      "learning_rate": 1.2841059638033765e-05,
      "loss": 0.7896,
      "step": 1231
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9100590025127928,
      "learning_rate": 1.2830183673463941e-05,
      "loss": 0.7616,
      "step": 1232
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8495301196659513,
      "learning_rate": 1.2819304068446532e-05,
      "loss": 0.7689,
      "step": 1233
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8780282740804941,
      "learning_rate": 1.280842083697591e-05,
      "loss": 0.7213,
      "step": 1234
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8601052861886926,
      "learning_rate": 1.2797533993051108e-05,
      "loss": 0.8038,
      "step": 1235
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.873800855570753,
      "learning_rate": 1.2786643550675806e-05,
      "loss": 0.9166,
      "step": 1236
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.872977358615432,
      "learning_rate": 1.277574952385831e-05,
      "loss": 0.7841,
      "step": 1237
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9348029069693068,
      "learning_rate": 1.2764851926611538e-05,
      "loss": 0.8556,
      "step": 1238
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.9177313188133553,
      "learning_rate": 1.2753950772953006e-05,
      "loss": 0.8374,
      "step": 1239
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8880377226520811,
      "learning_rate": 1.2743046076904795e-05,
      "loss": 0.807,
      "step": 1240
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8581317781991079,
      "learning_rate": 1.2732137852493548e-05,
      "loss": 0.7839,
      "step": 1241
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9423518364761677,
      "learning_rate": 1.2721226113750449e-05,
      "loss": 0.9337,
      "step": 1242
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.8697716259429181,
      "learning_rate": 1.2710310874711195e-05,
      "loss": 0.7732,
      "step": 1243
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.030589559664667,
      "learning_rate": 1.2699392149415992e-05,
      "loss": 0.8249,
      "step": 1244
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9171711323829616,
      "learning_rate": 1.268846995190953e-05,
      "loss": 0.8395,
      "step": 1245
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.8315506767883373,
      "learning_rate": 1.2677544296240963e-05,
      "loss": 0.7798,
      "step": 1246
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.20140255611588,
      "learning_rate": 1.2666615196463887e-05,
      "loss": 0.8241,
      "step": 1247
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9073000748254678,
      "learning_rate": 1.2655682666636346e-05,
      "loss": 0.854,
      "step": 1248
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9417372875604748,
      "learning_rate": 1.2644746720820778e-05,
      "loss": 0.9372,
      "step": 1249
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9290306561838421,
      "learning_rate": 1.2633807373084022e-05,
      "loss": 0.8309,
      "step": 1250
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9534748535916682,
      "learning_rate": 1.262286463749729e-05,
      "loss": 0.906,
      "step": 1251
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.8950263175691467,
      "learning_rate": 1.261191852813616e-05,
      "loss": 0.8991,
      "step": 1252
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.9129258423461022,
      "learning_rate": 1.2600969059080543e-05,
      "loss": 0.8243,
      "step": 1253
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.8634972435621588,
      "learning_rate": 1.2590016244414674e-05,
      "loss": 0.781,
      "step": 1254
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.903443853461887,
      "learning_rate": 1.2579060098227086e-05,
      "loss": 0.8297,
      "step": 1255
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.946311697477589,
      "learning_rate": 1.25681006346106e-05,
      "loss": 0.8786,
      "step": 1256
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.8728360680570422,
      "learning_rate": 1.255713786766231e-05,
      "loss": 0.7831,
      "step": 1257
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9486738553484381,
      "learning_rate": 1.2546171811483555e-05,
      "loss": 0.7885,
      "step": 1258
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.852399743727923,
      "learning_rate": 1.2535202480179902e-05,
      "loss": 0.7437,
      "step": 1259
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.8446167680879512,
      "learning_rate": 1.2524229887861132e-05,
      "loss": 0.8096,
      "step": 1260
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9119806793650314,
      "learning_rate": 1.2513254048641228e-05,
      "loss": 0.835,
      "step": 1261
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9060150023697798,
      "learning_rate": 1.2502274976638337e-05,
      "loss": 0.8107,
      "step": 1262
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9178164241596967,
      "learning_rate": 1.2491292685974775e-05,
      "loss": 0.8105,
      "step": 1263
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9694234877661287,
      "learning_rate": 1.2480307190776994e-05,
      "loss": 0.9413,
      "step": 1264
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.054215876059854,
      "learning_rate": 1.2469318505175564e-05,
      "loss": 0.8096,
      "step": 1265
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9302411914965589,
      "learning_rate": 1.2458326643305162e-05,
      "loss": 0.836,
      "step": 1266
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.9092781470891533,
      "learning_rate": 1.244733161930456e-05,
      "loss": 0.9085,
      "step": 1267
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.0185778379552477,
      "learning_rate": 1.2436333447316578e-05,
      "loss": 0.8225,
      "step": 1268
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.008752810569094,
      "learning_rate": 1.2425332141488103e-05,
      "loss": 0.7972,
      "step": 1269
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9508043331539013,
      "learning_rate": 1.241432771597004e-05,
      "loss": 0.7727,
      "step": 1270
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9081696570803016,
      "learning_rate": 1.240332018491732e-05,
      "loss": 0.8256,
      "step": 1271
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9184233825599799,
      "learning_rate": 1.2392309562488857e-05,
      "loss": 0.771,
      "step": 1272
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8776440619902922,
      "learning_rate": 1.2381295862847546e-05,
      "loss": 0.7685,
      "step": 1273
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.900259780582409,
      "learning_rate": 1.2370279100160243e-05,
      "loss": 0.7493,
      "step": 1274
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.0195338622101757,
      "learning_rate": 1.235925928859774e-05,
      "loss": 0.8172,
      "step": 1275
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9777459968555523,
      "learning_rate": 1.2348236442334747e-05,
      "loss": 0.8224,
      "step": 1276
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8132773288609149,
      "learning_rate": 1.2337210575549885e-05,
      "loss": 0.7546,
      "step": 1277
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.978595342667741,
      "learning_rate": 1.2326181702425662e-05,
      "loss": 0.7756,
      "step": 1278
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.916777195194068,
      "learning_rate": 1.2315149837148441e-05,
      "loss": 0.7982,
      "step": 1279
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.981152620310292,
      "learning_rate": 1.230411499390845e-05,
      "loss": 0.7198,
      "step": 1280
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8969732111862679,
      "learning_rate": 1.229307718689973e-05,
      "loss": 0.7637,
      "step": 1281
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8998245989558533,
      "learning_rate": 1.2282036430320153e-05,
      "loss": 0.8069,
      "step": 1282
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.9532879266081471,
      "learning_rate": 1.2270992738371363e-05,
      "loss": 0.7618,
      "step": 1283
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.8787836684734067,
      "learning_rate": 1.2259946125258804e-05,
      "loss": 0.7614,
      "step": 1284
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9396395297610475,
      "learning_rate": 1.224889660519166e-05,
      "loss": 0.805,
      "step": 1285
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8943880825019749,
      "learning_rate": 1.223784419238286e-05,
      "loss": 0.7378,
      "step": 1286
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8440758161057578,
      "learning_rate": 1.2226788901049052e-05,
      "loss": 0.7209,
      "step": 1287
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9473861048709283,
      "learning_rate": 1.221573074541059e-05,
      "loss": 0.7762,
      "step": 1288
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8593533374009497,
      "learning_rate": 1.2204669739691508e-05,
      "loss": 0.7285,
      "step": 1289
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8949062108663332,
      "learning_rate": 1.2193605898119513e-05,
      "loss": 0.8688,
      "step": 1290
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9090979861560906,
      "learning_rate": 1.2182539234925954e-05,
      "loss": 0.8169,
      "step": 1291
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.0789848804501179,
      "learning_rate": 1.2171469764345807e-05,
      "loss": 0.8544,
      "step": 1292
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9176123781134722,
      "learning_rate": 1.216039750061767e-05,
      "loss": 0.8013,
      "step": 1293
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8972025672926027,
      "learning_rate": 1.2149322457983719e-05,
      "loss": 0.7344,
      "step": 1294
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9696364696159007,
      "learning_rate": 1.2138244650689714e-05,
      "loss": 0.8064,
      "step": 1295
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.010581794626841,
      "learning_rate": 1.2127164092984976e-05,
      "loss": 0.8848,
      "step": 1296
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.9026898328330966,
      "learning_rate": 1.2116080799122349e-05,
      "loss": 0.7996,
      "step": 1297
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.1285869333851108,
      "learning_rate": 1.2104994783358206e-05,
      "loss": 0.9334,
      "step": 1298
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.819826433384388,
      "learning_rate": 1.2093906059952425e-05,
      "loss": 0.7214,
      "step": 1299
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.017003481795163,
      "learning_rate": 1.2082814643168357e-05,
      "loss": 0.7696,
      "step": 1300
    },
    {
      "epoch": 0.91,
      "eval_loss": 0.795089840888977,
      "eval_runtime": 187.5096,
      "eval_samples_per_second": 215.733,
      "eval_steps_per_second": 26.969,
      "step": 1300
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8798817267157177,
      "learning_rate": 1.2071720547272823e-05,
      "loss": 0.7564,
      "step": 1301
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.90666438626132,
      "learning_rate": 1.2060623786536086e-05,
      "loss": 0.8346,
      "step": 1302
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8789513496907703,
      "learning_rate": 1.2049524375231848e-05,
      "loss": 0.817,
      "step": 1303
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9112276995271548,
      "learning_rate": 1.2038422327637208e-05,
      "loss": 0.8376,
      "step": 1304
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9175604567901359,
      "learning_rate": 1.2027317658032655e-05,
      "loss": 0.8326,
      "step": 1305
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8946435401851419,
      "learning_rate": 1.2016210380702068e-05,
      "loss": 0.7805,
      "step": 1306
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9434366572160074,
      "learning_rate": 1.2005100509932662e-05,
      "loss": 0.7713,
      "step": 1307
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9641160978432078,
      "learning_rate": 1.199398806001499e-05,
      "loss": 0.8679,
      "step": 1308
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9441666498195864,
      "learning_rate": 1.1982873045242933e-05,
      "loss": 0.8775,
      "step": 1309
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8583055752214402,
      "learning_rate": 1.1971755479913665e-05,
      "loss": 0.8123,
      "step": 1310
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.9304765536666078,
      "learning_rate": 1.1960635378327636e-05,
      "loss": 0.7299,
      "step": 1311
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8234687024912583,
      "learning_rate": 1.194951275478857e-05,
      "loss": 0.7957,
      "step": 1312
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.8546894141514437,
      "learning_rate": 1.1938387623603425e-05,
      "loss": 0.8257,
      "step": 1313
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8632822306104739,
      "learning_rate": 1.1927259999082386e-05,
      "loss": 0.7314,
      "step": 1314
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.896871727091561,
      "learning_rate": 1.1916129895538847e-05,
      "loss": 0.7087,
      "step": 1315
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9393014655617864,
      "learning_rate": 1.1904997327289391e-05,
      "loss": 0.728,
      "step": 1316
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8769371873165606,
      "learning_rate": 1.1893862308653777e-05,
      "loss": 0.736,
      "step": 1317
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8852596329458032,
      "learning_rate": 1.1882724853954902e-05,
      "loss": 0.7668,
      "step": 1318
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8617739696699027,
      "learning_rate": 1.1871584977518806e-05,
      "loss": 0.7659,
      "step": 1319
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9314434034383864,
      "learning_rate": 1.1860442693674648e-05,
      "loss": 0.8394,
      "step": 1320
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.861142003308729,
      "learning_rate": 1.1849298016754669e-05,
      "loss": 0.7768,
      "step": 1321
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8769070771121391,
      "learning_rate": 1.1838150961094204e-05,
      "loss": 0.8732,
      "step": 1322
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8603644023677578,
      "learning_rate": 1.182700154103164e-05,
      "loss": 0.6549,
      "step": 1323
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9638201294806444,
      "learning_rate": 1.1815849770908403e-05,
      "loss": 0.8464,
      "step": 1324
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.8306462320605537,
      "learning_rate": 1.180469566506895e-05,
      "loss": 0.7991,
      "step": 1325
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9863044155217197,
      "learning_rate": 1.1793539237860738e-05,
      "loss": 0.8572,
      "step": 1326
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.9702494386049709,
      "learning_rate": 1.1782380503634203e-05,
      "loss": 1.0178,
      "step": 1327
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8563480076569994,
      "learning_rate": 1.177121947674276e-05,
      "loss": 0.8421,
      "step": 1328
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9823180165930772,
      "learning_rate": 1.1760056171542773e-05,
      "loss": 0.821,
      "step": 1329
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.906116648047096,
      "learning_rate": 1.1748890602393521e-05,
      "loss": 0.7529,
      "step": 1330
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8593895588191588,
      "learning_rate": 1.1737722783657213e-05,
      "loss": 0.8545,
      "step": 1331
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8499393110247033,
      "learning_rate": 1.172655272969894e-05,
      "loss": 0.8143,
      "step": 1332
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9164004760981641,
      "learning_rate": 1.171538045488668e-05,
      "loss": 0.78,
      "step": 1333
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9122326263339436,
      "learning_rate": 1.170420597359125e-05,
      "loss": 0.8505,
      "step": 1334
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9549519243654508,
      "learning_rate": 1.1693029300186324e-05,
      "loss": 0.7455,
      "step": 1335
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.0516468035890543,
      "learning_rate": 1.1681850449048387e-05,
      "loss": 0.8577,
      "step": 1336
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8758025212436614,
      "learning_rate": 1.1670669434556716e-05,
      "loss": 0.8231,
      "step": 1337
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8884706110774874,
      "learning_rate": 1.1659486271093392e-05,
      "loss": 0.8108,
      "step": 1338
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9218324642854692,
      "learning_rate": 1.1648300973043245e-05,
      "loss": 0.7995,
      "step": 1339
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8654985510951089,
      "learning_rate": 1.1637113554793846e-05,
      "loss": 0.7501,
      "step": 1340
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.8380612233059123,
      "learning_rate": 1.1625924030735511e-05,
      "loss": 0.686,
      "step": 1341
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8848563947968523,
      "learning_rate": 1.161473241526125e-05,
      "loss": 0.8025,
      "step": 1342
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8821522053418632,
      "learning_rate": 1.1603538722766767e-05,
      "loss": 0.7701,
      "step": 1343
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8692396513709321,
      "learning_rate": 1.1592342967650445e-05,
      "loss": 0.831,
      "step": 1344
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8726825340572187,
      "learning_rate": 1.1581145164313307e-05,
      "loss": 0.8207,
      "step": 1345
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9591798533346475,
      "learning_rate": 1.1569945327159023e-05,
      "loss": 0.8628,
      "step": 1346
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9596109009779479,
      "learning_rate": 1.155874347059387e-05,
      "loss": 0.814,
      "step": 1347
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9027226890146384,
      "learning_rate": 1.1547539609026726e-05,
      "loss": 0.8522,
      "step": 1348
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8971455922709491,
      "learning_rate": 1.153633375686905e-05,
      "loss": 0.7769,
      "step": 1349
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9048377154051395,
      "learning_rate": 1.152512592853486e-05,
      "loss": 0.7424,
      "step": 1350
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8397557960034163,
      "learning_rate": 1.1513916138440712e-05,
      "loss": 0.7782,
      "step": 1351
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8503308719220157,
      "learning_rate": 1.1502704401005694e-05,
      "loss": 0.7616,
      "step": 1352
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8467495732273495,
      "learning_rate": 1.1491490730651389e-05,
      "loss": 0.7797,
      "step": 1353
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.899994053064031,
      "learning_rate": 1.1480275141801873e-05,
      "loss": 0.7726,
      "step": 1354
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.8583068406846777,
      "learning_rate": 1.146905764888369e-05,
      "loss": 0.7654,
      "step": 1355
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.9301024028790893,
      "learning_rate": 1.1457838266325828e-05,
      "loss": 0.7648,
      "step": 1356
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8523423465764791,
      "learning_rate": 1.1446617008559713e-05,
      "loss": 0.7613,
      "step": 1357
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9179001752750603,
      "learning_rate": 1.1435393890019173e-05,
      "loss": 0.7407,
      "step": 1358
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8807224051681771,
      "learning_rate": 1.1424168925140438e-05,
      "loss": 0.7672,
      "step": 1359
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9213553252352924,
      "learning_rate": 1.141294212836211e-05,
      "loss": 0.7772,
      "step": 1360
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9143784577248558,
      "learning_rate": 1.1401713514125152e-05,
      "loss": 0.7812,
      "step": 1361
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8653978990179322,
      "learning_rate": 1.1390483096872851e-05,
      "loss": 0.8557,
      "step": 1362
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.0061464612362694,
      "learning_rate": 1.137925089105083e-05,
      "loss": 0.926,
      "step": 1363
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.1393025054238606,
      "learning_rate": 1.1368016911107004e-05,
      "loss": 0.9341,
      "step": 1364
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9080538249481118,
      "learning_rate": 1.1356781171491569e-05,
      "loss": 0.8169,
      "step": 1365
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8933884113869166,
      "learning_rate": 1.1345543686656989e-05,
      "loss": 0.8321,
      "step": 1366
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8700460854854123,
      "learning_rate": 1.1334304471057968e-05,
      "loss": 0.7425,
      "step": 1367
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.9531764227643872,
      "learning_rate": 1.1323063539151442e-05,
      "loss": 0.8056,
      "step": 1368
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.902921855333192,
      "learning_rate": 1.1311820905396547e-05,
      "loss": 0.878,
      "step": 1369
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.8272338173452627,
      "learning_rate": 1.1300576584254617e-05,
      "loss": 0.7549,
      "step": 1370
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8555435739973453,
      "learning_rate": 1.1289330590189152e-05,
      "loss": 0.8163,
      "step": 1371
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.895793435047801,
      "learning_rate": 1.12780829376658e-05,
      "loss": 0.8736,
      "step": 1372
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8643206423346252,
      "learning_rate": 1.126683364115235e-05,
      "loss": 0.813,
      "step": 1373
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9454856642959599,
      "learning_rate": 1.1255582715118701e-05,
      "loss": 0.7898,
      "step": 1374
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8681278075027504,
      "learning_rate": 1.1244330174036843e-05,
      "loss": 0.6867,
      "step": 1375
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9328868275202035,
      "learning_rate": 1.123307603238086e-05,
      "loss": 0.824,
      "step": 1376
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9073248755158357,
      "learning_rate": 1.1221820304626878e-05,
      "loss": 0.8881,
      "step": 1377
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9364571025151188,
      "learning_rate": 1.1210563005253067e-05,
      "loss": 0.8359,
      "step": 1378
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9404626028617894,
      "learning_rate": 1.1199304148739627e-05,
      "loss": 0.8087,
      "step": 1379
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.260358311619401,
      "learning_rate": 1.1188043749568752e-05,
      "loss": 0.7809,
      "step": 1380
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9276597929451739,
      "learning_rate": 1.117678182222462e-05,
      "loss": 0.8202,
      "step": 1381
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8692262060914322,
      "learning_rate": 1.1165518381193379e-05,
      "loss": 0.7998,
      "step": 1382
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9063084872578175,
      "learning_rate": 1.115425344096313e-05,
      "loss": 0.8227,
      "step": 1383
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9570539636008829,
      "learning_rate": 1.1142987016023887e-05,
      "loss": 0.7911,
      "step": 1384
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9148160271160868,
      "learning_rate": 1.1131719120867584e-05,
      "loss": 0.8319,
      "step": 1385
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8706583871071989,
      "learning_rate": 1.1120449769988043e-05,
      "loss": 0.778,
      "step": 1386
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8691907155679381,
      "learning_rate": 1.1109178977880966e-05,
      "loss": 0.7964,
      "step": 1387
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8493717758867598,
      "learning_rate": 1.1097906759043896e-05,
      "loss": 0.6965,
      "step": 1388
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9992890399936691,
      "learning_rate": 1.108663312797622e-05,
      "loss": 0.834,
      "step": 1389
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8756559048622335,
      "learning_rate": 1.1075358099179136e-05,
      "loss": 0.7885,
      "step": 1390
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8429751025054221,
      "learning_rate": 1.1064081687155647e-05,
      "loss": 0.7318,
      "step": 1391
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8655765925457657,
      "learning_rate": 1.1052803906410531e-05,
      "loss": 0.7906,
      "step": 1392
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8853165227750313,
      "learning_rate": 1.1041524771450329e-05,
      "loss": 0.8022,
      "step": 1393
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8899368333997295,
      "learning_rate": 1.1030244296783316e-05,
      "loss": 0.8456,
      "step": 1394
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.8465449912567415,
      "learning_rate": 1.10189624969195e-05,
      "loss": 0.7837,
      "step": 1395
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9150904249953385,
      "learning_rate": 1.1007679386370591e-05,
      "loss": 0.8145,
      "step": 1396
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.9457916211995286,
      "learning_rate": 1.0996394979649977e-05,
      "loss": 0.8615,
      "step": 1397
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.823661666782617,
      "learning_rate": 1.0985109291272729e-05,
      "loss": 0.7733,
      "step": 1398
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.0025967815723742,
      "learning_rate": 1.0973822335755551e-05,
      "loss": 0.7727,
      "step": 1399
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.0641070270245732,
      "learning_rate": 1.0962534127616784e-05,
      "loss": 0.8476,
      "step": 1400
    },
    {
      "epoch": 0.98,
      "eval_loss": 0.7878583073616028,
      "eval_runtime": 187.2349,
      "eval_samples_per_second": 216.049,
      "eval_steps_per_second": 27.009,
      "step": 1400
    }
  ],
  "logging_steps": 1,
  "max_steps": 2870,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "total_flos": 1.6525916175020851e+19,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}