{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1911,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 3.4482758620689656e-07, "loss": 4.9648, "step": 1},
    {"epoch": 0.0, "learning_rate": 6.896551724137931e-07, "loss": 4.6289, "step": 2},
    {"epoch": 0.0, "learning_rate": 1.0344827586206898e-06, "loss": 5.3125, "step": 3},
    {"epoch": 0.0, "learning_rate": 1.3793103448275862e-06, "loss": 5.4297, "step": 4},
    {"epoch": 0.0, "learning_rate": 1.724137931034483e-06, "loss": 5.4922, "step": 5},
    {"epoch": 0.0, "learning_rate": 2.0689655172413796e-06, "loss": 4.0117, "step": 6},
    {"epoch": 0.0, "learning_rate": 2.4137931034482762e-06, "loss": 5.2227, "step": 7},
    {"epoch": 0.0, "learning_rate": 2.7586206896551725e-06, "loss": 5.0508, "step": 8},
    {"epoch": 0.0, "learning_rate": 3.103448275862069e-06, "loss": 5.0273, "step": 9},
    {"epoch": 0.01, "learning_rate": 3.448275862068966e-06, "loss": 4.6289, "step": 10},
    {"epoch": 0.01, "learning_rate": 3.793103448275862e-06, "loss": 4.5156, "step": 11},
    {"epoch": 0.01, "learning_rate": 4.137931034482759e-06, "loss": 4.5703, "step": 12},
    {"epoch": 0.01, "learning_rate": 4.482758620689656e-06, "loss": 3.4629, "step": 13},
    {"epoch": 0.01, "learning_rate": 4.8275862068965525e-06, "loss": 4.2754, "step": 14},
    {"epoch": 0.01, "learning_rate": 5.172413793103449e-06, "loss": 4.0742, "step": 15},
    {"epoch": 0.01, "learning_rate": 5.517241379310345e-06, "loss": 3.9727, "step": 16},
    {"epoch": 0.01, "learning_rate": 5.862068965517242e-06, "loss": 3.7148, "step": 17},
    {"epoch": 0.01, "learning_rate": 6.206896551724138e-06, "loss": 3.3047, "step": 18},
    {"epoch": 0.01, "learning_rate": 6.551724137931035e-06, "loss": 3.7109, "step": 19},
    {"epoch": 0.01, "learning_rate": 6.896551724137932e-06, "loss": 3.1035, "step": 20},
    {"epoch": 0.01, "learning_rate": 7.241379310344828e-06, "loss": 4.0449, "step": 21},
    {"epoch": 0.01, "learning_rate": 7.586206896551724e-06, "loss": 3.6719, "step": 22},
    {"epoch": 0.01, "learning_rate": 7.93103448275862e-06, "loss": 3.7344, "step": 23},
    {"epoch": 0.01, "learning_rate": 8.275862068965518e-06, "loss": 3.3105, "step": 24},
    {"epoch": 0.01, "learning_rate": 8.620689655172414e-06, "loss": 3.3125, "step": 25},
    {"epoch": 0.01, "learning_rate": 8.965517241379312e-06, "loss": 3.2598, "step": 26},
    {"epoch": 0.01, "learning_rate": 9.310344827586207e-06, "loss": 2.9922, "step": 27},
    {"epoch": 0.01, "learning_rate": 9.655172413793105e-06, "loss": 3.0723, "step": 28},
    {"epoch": 0.02, "learning_rate": 1e-05, "loss": 2.5176, "step": 29},
    {"epoch": 0.02, "learning_rate": 1.0344827586206898e-05, "loss": 1.772, "step": 30},
    {"epoch": 0.02, "learning_rate": 1.0689655172413792e-05, "loss": 2.7646, "step": 31},
    {"epoch": 0.02, "learning_rate": 1.103448275862069e-05, "loss": 2.3545, "step": 32},
    {"epoch": 0.02, "learning_rate": 1.1379310344827587e-05, "loss": 2.7559, "step": 33},
    {"epoch": 0.02, "learning_rate": 1.1724137931034483e-05, "loss": 2.6309, "step": 34},
    {"epoch": 0.02, "learning_rate": 1.206896551724138e-05, "loss": 2.2471, "step": 35},
    {"epoch": 0.02, "learning_rate": 1.2413793103448277e-05, "loss": 2.4277, "step": 36},
    {"epoch": 0.02, "learning_rate": 1.2758620689655174e-05, "loss": 2.7324, "step": 37},
    {"epoch": 0.02, "learning_rate": 1.310344827586207e-05, "loss": 2.5166, "step": 38},
    {"epoch": 0.02, "learning_rate": 1.3448275862068967e-05, "loss": 2.4258, "step": 39},
    {"epoch": 0.02, "learning_rate": 1.3793103448275863e-05, "loss": 2.1025, "step": 40},
    {"epoch": 0.02, "learning_rate": 1.4137931034482759e-05, "loss": 2.6289, "step": 41},
    {"epoch": 0.02, "learning_rate": 1.4482758620689657e-05, "loss": 2.3945, "step": 42},
    {"epoch": 0.02, "learning_rate": 1.4827586206896554e-05, "loss": 2.0024, "step": 43},
    {"epoch": 0.02, "learning_rate": 1.5172413793103448e-05, "loss": 2.0791, "step": 44},
    {"epoch": 0.02, "learning_rate": 1.5517241379310346e-05, "loss": 2.1133, "step": 45},
    {"epoch": 0.02, "learning_rate": 1.586206896551724e-05, "loss": 2.0801, "step": 46},
    {"epoch": 0.02, "learning_rate": 1.6206896551724137e-05, "loss": 2.251, "step": 47},
    {"epoch": 0.03, "learning_rate": 1.6551724137931037e-05, "loss": 1.9714, "step": 48},
    {"epoch": 0.03, "learning_rate": 1.6896551724137932e-05, "loss": 2.2617, "step": 49},
    {"epoch": 0.03, "learning_rate": 1.7241379310344828e-05, "loss": 1.8398, "step": 50},
    {"epoch": 0.03, "learning_rate": 1.7586206896551724e-05, "loss": 2.0005, "step": 51},
    {"epoch": 0.03, "learning_rate": 1.7931034482758623e-05, "loss": 1.9062, "step": 52},
    {"epoch": 0.03, "learning_rate": 1.827586206896552e-05, "loss": 1.9971, "step": 53},
    {"epoch": 0.03, "learning_rate": 1.8620689655172415e-05, "loss": 1.6963, "step": 54},
    {"epoch": 0.03, "learning_rate": 1.896551724137931e-05, "loss": 1.8438, "step": 55},
    {"epoch": 0.03, "learning_rate": 1.931034482758621e-05, "loss": 1.686, "step": 56},
    {"epoch": 0.03, "learning_rate": 1.9655172413793106e-05, "loss": 2.041, "step": 57},
    {"epoch": 0.03, "learning_rate": 2e-05, "loss": 1.7822, "step": 58},
    {"epoch": 0.03, "learning_rate": 1.9999985627947104e-05, "loss": 1.9697, "step": 59},
    {"epoch": 0.03, "learning_rate": 1.9999942511829714e-05, "loss": 1.8018, "step": 60},
    {"epoch": 0.03, "learning_rate": 1.999987065177177e-05, "loss": 1.8613, "step": 61},
    {"epoch": 0.03, "learning_rate": 1.9999770047979828e-05, "loss": 1.4146, "step": 62},
    {"epoch": 0.03, "learning_rate": 1.9999640700743063e-05, "loss": 1.6846, "step": 63},
    {"epoch": 0.03, "learning_rate": 1.9999482610433275e-05, "loss": 1.7246, "step": 64},
    {"epoch": 0.03, "learning_rate": 1.999929577750487e-05, "loss": 1.8691, "step": 65},
    {"epoch": 0.03, "learning_rate": 1.99990802024949e-05, "loss": 1.7725, "step": 66},
    {"epoch": 0.04, "learning_rate": 1.9998835886022997e-05, "loss": 1.5044, "step": 67},
    {"epoch": 0.04, "learning_rate": 1.999856282879144e-05, "loss": 1.3157, "step": 68},
    {"epoch": 0.04, "learning_rate": 1.9998261031585104e-05, "loss": 1.8428, "step": 69},
    {"epoch": 0.04, "learning_rate": 1.999793049527148e-05, "loss": 2.0723, "step": 70},
    {"epoch": 0.04, "learning_rate": 1.999757122080066e-05, "loss": 1.8516, "step": 71},
    {"epoch": 0.04, "learning_rate": 1.999718320920535e-05, "loss": 1.4434, "step": 72},
    {"epoch": 0.04, "learning_rate": 1.9996766461600855e-05, "loss": 1.2244, "step": 73},
    {"epoch": 0.04, "learning_rate": 1.9996320979185074e-05, "loss": 1.6006, "step": 74},
    {"epoch": 0.04, "learning_rate": 1.9995846763238514e-05, "loss": 1.8066, "step": 75},
    {"epoch": 0.04, "learning_rate": 1.999534381512426e-05, "loss": 1.9062, "step": 76},
    {"epoch": 0.04, "learning_rate": 1.9994812136287994e-05, "loss": 2.0957, "step": 77},
    {"epoch": 0.04, "learning_rate": 1.9994251728257978e-05, "loss": 1.7969, "step": 78},
    {"epoch": 0.04, "learning_rate": 1.9993662592645057e-05, "loss": 1.958, "step": 79},
    {"epoch": 0.04, "learning_rate": 1.999304473114265e-05, "loss": 1.4932, "step": 80},
    {"epoch": 0.04, "learning_rate": 1.9992398145526738e-05, "loss": 1.4139, "step": 81},
    {"epoch": 0.04, "learning_rate": 1.999172283765588e-05, "loss": 1.6318, "step": 82},
    {"epoch": 0.04, "learning_rate": 1.999101880947118e-05, "loss": 1.9033, "step": 83},
    {"epoch": 0.04, "learning_rate": 1.9990286062996316e-05, "loss": 1.7368, "step": 84},
    {"epoch": 0.04, "learning_rate": 1.9989524600337493e-05, "loss": 1.9033, "step": 85},
    {"epoch": 0.05, "learning_rate": 1.998873442368347e-05, "loss": 1.5342, "step": 86},
    {"epoch": 0.05, "learning_rate": 1.998791553530554e-05, "loss": 1.459, "step": 87},
    {"epoch": 0.05, "learning_rate": 1.9987067937557527e-05, "loss": 1.5562, "step": 88},
    {"epoch": 0.05, "learning_rate": 1.9986191632875768e-05, "loss": 1.3936, "step": 89},
    {"epoch": 0.05, "learning_rate": 1.9985286623779126e-05, "loss": 1.494, "step": 90},
    {"epoch": 0.05, "learning_rate": 1.9984352912868973e-05, "loss": 1.312, "step": 91},
    {"epoch": 0.05, "learning_rate": 1.9983390502829168e-05, "loss": 1.8467, "step": 92},
    {"epoch": 0.05, "learning_rate": 1.998239939642608e-05, "loss": 1.4397, "step": 93},
    {"epoch": 0.05, "learning_rate": 1.998137959650855e-05, "loss": 1.8779, "step": 94},
    {"epoch": 0.05, "learning_rate": 1.9980331106007912e-05, "loss": 1.7192, "step": 95},
    {"epoch": 0.05, "learning_rate": 1.9979253927937948e-05, "loss": 1.6748, "step": 96},
    {"epoch": 0.05, "learning_rate": 1.9978148065394917e-05, "loss": 1.5498, "step": 97},
    {"epoch": 0.05, "learning_rate": 1.9977013521557516e-05, "loss": 1.2637, "step": 98},
    {"epoch": 0.05, "learning_rate": 1.9975850299686894e-05, "loss": 1.1064, "step": 99},
    {"epoch": 0.05, "learning_rate": 1.9974658403126624e-05, "loss": 1.5728, "step": 100},
    {"epoch": 0.05, "learning_rate": 1.997343783530271e-05, "loss": 1.5781, "step": 101},
    {"epoch": 0.05, "learning_rate": 1.9972188599723564e-05, "loss": 1.7412, "step": 102},
    {"epoch": 0.05, "learning_rate": 1.997091069998e-05, "loss": 1.3691, "step": 103},
    {"epoch": 0.05, "learning_rate": 1.9969604139745227e-05, "loss": 1.4824, "step": 104},
    {"epoch": 0.05, "learning_rate": 1.9968268922774842e-05, "loss": 1.5947, "step": 105},
    {"epoch": 0.06, "learning_rate": 1.99669050529068e-05, "loss": 1.5049, "step": 106},
    {"epoch": 0.06, "learning_rate": 1.9965512534061426e-05, "loss": 1.8086, "step": 107},
    {"epoch": 0.06, "learning_rate": 1.9964091370241384e-05, "loss": 1.5146, "step": 108},
    {"epoch": 0.06, "learning_rate": 1.9962641565531694e-05, "loss": 1.6699, "step": 109},
    {"epoch": 0.06, "learning_rate": 1.996116312409968e-05, "loss": 1.4238, "step": 110},
    {"epoch": 0.06, "learning_rate": 1.9959656050194994e-05, "loss": 1.3311, "step": 111},
    {"epoch": 0.06, "learning_rate": 1.9958120348149585e-05, "loss": 1.5396, "step": 112},
    {"epoch": 0.06, "learning_rate": 1.9956556022377693e-05, "loss": 1.3779, "step": 113},
    {"epoch": 0.06, "learning_rate": 1.995496307737583e-05, "loss": 1.2122, "step": 114},
    {"epoch": 0.06, "learning_rate": 1.9953341517722773e-05, "loss": 1.8545, "step": 115},
    {"epoch": 0.06, "learning_rate": 1.9951691348079553e-05, "loss": 1.0951, "step": 116},
    {"epoch": 0.06, "learning_rate": 1.995001257318943e-05, "loss": 1.6533, "step": 117},
    {"epoch": 0.06, "learning_rate": 1.99483051978779e-05, "loss": 1.6465, "step": 118},
    {"epoch": 0.06, "learning_rate": 1.994656922705266e-05, "loss": 1.6045, "step": 119},
    {"epoch": 0.06, "learning_rate": 1.9944804665703593e-05, "loss": 1.6978, "step": 120},
    {"epoch": 0.06, "learning_rate": 1.994301151890278e-05, "loss": 1.6748, "step": 121},
    {"epoch": 0.06, "learning_rate": 1.9941189791804466e-05, "loss": 1.3828, "step": 122},
    {"epoch": 0.06, "learning_rate": 1.9939339489645034e-05, "loss": 1.3296, "step": 123},
    {"epoch": 0.06, "learning_rate": 1.9937460617743013e-05, "loss": 1.6924, "step": 124},
    {"epoch": 0.07, "learning_rate": 1.993555318149906e-05, "loss": 1.2607, "step": 125},
    {"epoch": 0.07, "learning_rate": 1.9933617186395917e-05, "loss": 1.7383, "step": 126},
    {"epoch": 0.07, "learning_rate": 1.9931652637998443e-05, "loss": 1.7686, "step": 127},
    {"epoch": 0.07, "learning_rate": 1.9929659541953548e-05, "loss": 1.8975, "step": 128},
    {"epoch": 0.07, "learning_rate": 1.992763790399021e-05, "loss": 1.8799, "step": 129},
    {"epoch": 0.07, "learning_rate": 1.9925587729919447e-05, "loss": 1.5991, "step": 130},
    {"epoch": 0.07, "learning_rate": 1.99235090256343e-05, "loss": 1.999, "step": 131},
    {"epoch": 0.07, "learning_rate": 1.992140179710982e-05, "loss": 1.5149, "step": 132},
    {"epoch": 0.07, "learning_rate": 1.991926605040305e-05, "loss": 1.7344, "step": 133},
    {"epoch": 0.07, "learning_rate": 1.9917101791652997e-05, "loss": 1.1921, "step": 134},
    {"epoch": 0.07, "learning_rate": 1.991490902708063e-05, "loss": 1.6406, "step": 135},
    {"epoch": 0.07, "learning_rate": 1.9912687762988855e-05, "loss": 1.7539, "step": 136},
    {"epoch": 0.07, "learning_rate": 1.9910438005762503e-05, "loss": 1.3281, "step": 137},
    {"epoch": 0.07, "learning_rate": 1.9908159761868292e-05, "loss": 1.6001, "step": 138},
    {"epoch": 0.07, "learning_rate": 1.9905853037854834e-05, "loss": 1.3193, "step": 139},
    {"epoch": 0.07, "learning_rate": 1.9903517840352605e-05, "loss": 1.2607, "step": 140},
    {"epoch": 0.07, "learning_rate": 1.9901154176073914e-05, "loss": 2.1016, "step": 141},
    {"epoch": 0.07, "learning_rate": 1.9898762051812906e-05, "loss": 1.7861, "step": 142},
    {"epoch": 0.07, "learning_rate": 1.9896341474445526e-05, "loss": 1.2358, "step": 143},
    {"epoch": 0.08, "learning_rate": 1.989389245092951e-05, "loss": 1.8379, "step": 144},
    {"epoch": 0.08, "learning_rate": 1.9891414988304357e-05, "loss": 1.6826, "step": 145},
    {"epoch": 0.08, "learning_rate": 1.988890909369131e-05, "loss": 1.3398, "step": 146},
    {"epoch": 0.08, "learning_rate": 1.988637477429334e-05, "loss": 1.6172, "step": 147},
    {"epoch": 0.08, "learning_rate": 1.9883812037395126e-05, "loss": 1.877, "step": 148},
    {"epoch": 0.08, "learning_rate": 1.988122089036302e-05, "loss": 1.4463, "step": 149},
    {"epoch": 0.08, "learning_rate": 1.987860134064504e-05, "loss": 1.0048, "step": 150},
    {"epoch": 0.08, "learning_rate": 1.987595339577085e-05, "loss": 1.7891, "step": 151},
    {"epoch": 0.08, "learning_rate": 1.9873277063351737e-05, "loss": 1.5967, "step": 152},
    {"epoch": 0.08, "learning_rate": 1.9870572351080573e-05, "loss": 1.7192, "step": 153},
    {"epoch": 0.08, "learning_rate": 1.9867839266731813e-05, "loss": 1.8652, "step": 154},
    {"epoch": 0.08, "learning_rate": 1.9865077818161464e-05, "loss": 1.4697, "step": 155},
    {"epoch": 0.08, "learning_rate": 1.9862288013307062e-05, "loss": 1.7266, "step": 156},
    {"epoch": 0.08, "learning_rate": 1.9859469860187654e-05, "loss": 1.3608, "step": 157},
    {"epoch": 0.08, "learning_rate": 1.9856623366903765e-05, "loss": 1.1792, "step": 158},
    {"epoch": 0.08, "learning_rate": 1.9853748541637387e-05, "loss": 1.2749, "step": 159},
    {"epoch": 0.08, "learning_rate": 1.985084539265195e-05, "loss": 1.1621, "step": 160},
    {"epoch": 0.08, "learning_rate": 1.9847913928292297e-05, "loss": 1.6816, "step": 161},
    {"epoch": 0.08, "learning_rate": 1.9844954156984656e-05, "loss": 1.6172, "step": 162},
    {"epoch": 0.09, "learning_rate": 1.9841966087236626e-05, "loss": 1.4736, "step": 163},
    {"epoch": 0.09, "learning_rate": 1.983894972763715e-05, "loss": 1.1748, "step": 164},
    {"epoch": 0.09, "learning_rate": 1.983590508685648e-05, "loss": 1.3047, "step": 165},
    {"epoch": 0.09, "learning_rate": 1.9832832173646164e-05, "loss": 1.8203, "step": 166},
    {"epoch": 0.09, "learning_rate": 1.982973099683902e-05, "loss": 1.2522, "step": 167},
    {"epoch": 0.09, "learning_rate": 1.98266015653491e-05, "loss": 1.0513, "step": 168},
    {"epoch": 0.09, "learning_rate": 1.9823443888171676e-05, "loss": 0.9141, "step": 169},
    {"epoch": 0.09, "learning_rate": 1.9820257974383205e-05, "loss": 1.2417, "step": 170},
    {"epoch": 0.09, "learning_rate": 1.9817043833141317e-05, "loss": 1.8027, "step": 171},
    {"epoch": 0.09, "learning_rate": 1.981380147368477e-05, "loss": 1.6753, "step": 172},
    {"epoch": 0.09, "learning_rate": 1.9810530905333437e-05, "loss": 1.6045, "step": 173},
    {"epoch": 0.09, "learning_rate": 1.980723213748828e-05, "loss": 1.9287, "step": 174},
    {"epoch": 0.09, "learning_rate": 1.9803905179631303e-05, "loss": 1.231, "step": 175},
    {"epoch": 0.09, "learning_rate": 1.9800550041325553e-05, "loss": 1.4736, "step": 176},
    {"epoch": 0.09, "learning_rate": 1.9797166732215078e-05, "loss": 1.4077, "step": 177},
    {"epoch": 0.09, "learning_rate": 1.979375526202489e-05, "loss": 1.7012, "step": 178},
    {"epoch": 0.09, "learning_rate": 1.9790315640560962e-05, "loss": 1.4067, "step": 179},
    {"epoch": 0.09, "learning_rate": 1.9786847877710177e-05, "loss": 1.0358, "step": 180},
    {"epoch": 0.09, "learning_rate": 1.9783351983440306e-05, "loss": 1.3394, "step": 181},
    {"epoch": 0.1, "learning_rate": 1.9779827967799985e-05, "loss": 1.7329, "step": 182},
    {"epoch": 0.1, "learning_rate": 1.9776275840918685e-05, "loss": 1.2595, "step": 183},
    {"epoch": 0.1, "learning_rate": 1.9772695613006676e-05, "loss": 1.1616, "step": 184},
    {"epoch": 0.1, "learning_rate": 1.9769087294355006e-05, "loss": 1.5562, "step": 185},
    {"epoch": 0.1, "learning_rate": 1.9765450895335454e-05, "loss": 1.3149, "step": 186},
    {"epoch": 0.1, "learning_rate": 1.9761786426400532e-05, "loss": 1.5107, "step": 187},
    {"epoch": 0.1, "learning_rate": 1.975809389808343e-05, "loss": 1.7129, "step": 188},
    {"epoch": 0.1, "learning_rate": 1.9754373320997987e-05, "loss": 1.5195, "step": 189},
    {"epoch": 0.1, "learning_rate": 1.9750624705838666e-05, "loss": 1.1064, "step": 190},
    {"epoch": 0.1, "learning_rate": 1.974684806338053e-05, "loss": 1.5518, "step": 191},
    {"epoch": 0.1, "learning_rate": 1.9743043404479203e-05, "loss": 1.394, "step": 192},
    {"epoch": 0.1, "learning_rate": 1.9739210740070833e-05, "loss": 1.686, "step": 193},
    {"epoch": 0.1, "learning_rate": 1.973535008117207e-05, "loss": 1.6709, "step": 194},
    {"epoch": 0.1, "learning_rate": 1.9731461438880034e-05, "loss": 1.4941, "step": 195},
    {"epoch": 0.1, "learning_rate": 1.972754482437228e-05, "loss": 1.9531, "step": 196},
    {"epoch": 0.1, "learning_rate": 1.9723600248906766e-05, "loss": 1.6992, "step": 197},
    {"epoch": 0.1, "learning_rate": 1.9719627723821818e-05, "loss": 1.6558, "step": 198},
    {"epoch": 0.1, "learning_rate": 1.971562726053611e-05, "loss": 1.7363, "step": 199},
    {"epoch": 0.1, "learning_rate": 1.971159887054862e-05, "loss": 1.6577, "step": 200},
    {"epoch": 0.11, "learning_rate": 1.970754256543858e-05, "loss": 1.4634, "step": 201},
    {"epoch": 0.11, "learning_rate": 1.9703458356865484e-05, "loss": 1.7441, "step": 202},
    {"epoch": 0.11, "learning_rate": 1.9699346256569033e-05, "loss": 1.2324, "step": 203},
    {"epoch": 0.11, "learning_rate": 1.9695206276369077e-05, "loss": 1.4375, "step": 204},
    {"epoch": 0.11, "learning_rate": 1.969103842816563e-05, "loss": 1.8916, "step": 205},
    {"epoch": 0.11, "learning_rate": 1.968684272393879e-05, "loss": 1.2065, "step": 206},
    {"epoch": 0.11, "learning_rate": 1.968261917574874e-05, "loss": 1.4702, "step": 207},
    {"epoch": 0.11, "learning_rate": 1.967836779573569e-05, "loss": 1.4023, "step": 208},
    {"epoch": 0.11, "learning_rate": 1.9674088596119853e-05, "loss": 1.4757, "step": 209},
    {"epoch": 0.11, "learning_rate": 1.96697815892014e-05, "loss": 1.1831, "step": 210},
    {"epoch": 0.11, "learning_rate": 1.9665446787360444e-05, "loss": 1.8867, "step": 211},
    {"epoch": 0.11, "learning_rate": 1.9661084203056983e-05, "loss": 1.5752, "step": 212},
    {"epoch": 0.11, "learning_rate": 1.965669384883087e-05, "loss": 1.501, "step": 213},
    {"epoch": 0.11, "learning_rate": 1.9652275737301797e-05, "loss": 1.4746, "step": 214},
    {"epoch": 0.11, "learning_rate": 1.9647829881169223e-05, "loss": 1.5176, "step": 215},
    {"epoch": 0.11, "learning_rate": 1.9643356293212363e-05, "loss": 1.9346, "step": 216},
    {"epoch": 0.11, "learning_rate": 1.963885498629015e-05, "loss": 1.9209, "step": 217},
    {"epoch": 0.11, "learning_rate": 1.9634325973341183e-05, "loss": 1.1978, "step": 218},
    {"epoch": 0.11, "learning_rate": 1.962976926738371e-05, "loss": 1.6904, "step": 219},
    {"epoch": 0.12, "learning_rate": 1.962518488151557e-05, "loss": 1.7881, "step": 220},
    {"epoch": 0.12, "learning_rate": 1.9620572828914177e-05, "loss": 1.542, "step": 221},
    {"epoch": 0.12, "learning_rate": 1.9615933122836454e-05, "loss": 1.248, "step": 222},
    {"epoch": 0.12, "learning_rate": 1.961126577661883e-05, "loss": 1.072, "step": 223},
    {"epoch": 0.12, "learning_rate": 1.960657080367717e-05, "loss": 1.791, "step": 224},
    {"epoch": 0.12, "learning_rate": 1.9601848217506755e-05, "loss": 1.5854, "step": 225},
    {"epoch": 0.12, "learning_rate": 1.9597098031682238e-05, "loss": 1.4705, "step": 226},
    {"epoch": 0.12, "learning_rate": 1.9592320259857603e-05, "loss": 1.6816, "step": 227},
    {"epoch": 0.12, "learning_rate": 1.9587514915766124e-05, "loss": 1.9238, "step": 228},
    {"epoch": 0.12, "learning_rate": 1.9582682013220337e-05, "loss": 1.6934, "step": 229},
    {"epoch": 0.12, "learning_rate": 1.957782156611199e-05, "loss": 1.2354, "step": 230},
    {"epoch": 0.12, "learning_rate": 1.9572933588411997e-05, "loss": 1.7544, "step": 231},
    {"epoch": 0.12, "learning_rate": 1.9568018094170415e-05, "loss": 1.7842, "step": 232},
    {"epoch": 0.12, "learning_rate": 1.9563075097516397e-05, "loss": 1.0984, "step": 233},
    {"epoch": 0.12, "learning_rate": 1.9558104612658137e-05, "loss": 1.5488, "step": 234},
    {"epoch": 0.12, "learning_rate": 1.955310665388286e-05, "loss": 1.9023, "step": 235},
    {"epoch": 0.12, "learning_rate": 1.954808123555674e-05, "loss": 1.7031, "step": 236},
    {"epoch": 0.12, "learning_rate": 1.9543028372124903e-05, "loss": 1.6113, "step": 237},
    {"epoch": 0.12, "learning_rate": 1.9537948078111348e-05, "loss": 1.791, "step": 238},
    {"epoch": 0.13, "learning_rate": 1.953284036811893e-05, "loss": 1.6338, "step": 239},
    {"epoch": 0.13, "learning_rate": 1.9527705256829293e-05, "loss": 1.5161, "step": 240},
    {"epoch": 0.13, "learning_rate": 1.9522542759002866e-05, "loss": 1.5859, "step": 241},
    {"epoch": 0.13, "learning_rate": 1.9517352889478787e-05, "loss": 1.8755, "step": 242},
    {"epoch": 0.13, "learning_rate": 1.951213566317487e-05, "loss": 1.7578, "step": 243},
    {"epoch": 0.13, "learning_rate": 1.9506891095087563e-05, "loss": 1.0671, "step": 244},
    {"epoch": 0.13, "learning_rate": 1.950161920029191e-05, "loss": 1.1311, "step": 245},
    {"epoch": 0.13, "learning_rate": 1.9496319993941498e-05, "loss": 1.269, "step": 246},
    {"epoch": 0.13, "learning_rate": 1.949099349126843e-05, "loss": 1.2642, "step": 247},
    {"epoch": 0.13, "learning_rate": 1.9485639707583255e-05, "loss": 1.2371, "step": 248},
    {"epoch": 0.13, "learning_rate": 1.9480258658274947e-05, "loss": 1.6943, "step": 249},
    {"epoch": 0.13, "learning_rate": 1.947485035881085e-05, "loss": 1.7363, "step": 250},
    {"epoch": 0.13, "learning_rate": 1.946941482473664e-05, "loss": 1.9277, "step": 251},
    {"epoch": 0.13, "learning_rate": 1.9463952071676268e-05, "loss": 1.6338, "step": 252},
    {"epoch": 0.13, "learning_rate": 1.9458462115331938e-05, "loss": 1.4199, "step": 253},
    {"epoch": 0.13, "learning_rate": 1.945294497148403e-05, "loss": 1.7793, "step": 254},
    {"epoch": 0.13, "learning_rate": 1.9447400655991086e-05, "loss": 1.4854, "step": 255},
    {"epoch": 0.13, "learning_rate": 1.9441829184789745e-05, "loss": 1.2202, "step": 256},
    {"epoch": 0.13, "learning_rate": 1.94362305738947e-05, "loss": 1.3418, "step": 257},
    {"epoch": 0.14, "learning_rate": 1.9430604839398657e-05, "loss": 1.3691, "step": 258},
    {"epoch": 0.14, "learning_rate": 1.942495199747229e-05, "loss": 1.5259, "step": 259},
    {"epoch": 0.14, "learning_rate": 1.9419272064364186e-05, "loss": 1.7598, "step": 260},
    {"epoch": 0.14, "learning_rate": 1.9413565056400804e-05, "loss": 1.5083, "step": 261},
    {"epoch": 0.14, "learning_rate": 1.940783098998643e-05, "loss": 1.5337, "step": 262},
    {"epoch": 0.14, "learning_rate": 1.9402069881603123e-05, "loss": 1.29, "step": 263},
    {"epoch": 0.14, "learning_rate": 1.939628174781068e-05, "loss": 1.6855, "step": 264},
    {"epoch": 0.14, "learning_rate": 1.939046660524656e-05, "loss": 1.3389, "step": 265},
    {"epoch": 0.14, "learning_rate": 1.9384624470625884e-05, "loss": 1.4673, "step": 266},
    {"epoch": 0.14, "learning_rate": 1.9378755360741342e-05, "loss": 1.459, "step": 267},
    {"epoch": 0.14, "learning_rate": 1.9372859292463163e-05, "loss": 1.3325, "step": 268},
    {"epoch": 0.14, "learning_rate": 1.9366936282739067e-05, "loss": 1.29, "step": 269},
    {"epoch": 0.14, "learning_rate": 1.9360986348594226e-05, "loss": 1.7007, "step": 270},
    {"epoch": 0.14, "learning_rate": 1.935500950713118e-05, "loss": 1.6743, "step": 271},
    {"epoch": 0.14, "learning_rate": 1.934900577552983e-05, "loss": 1.2891, "step": 272},
    {"epoch": 0.14, "learning_rate": 1.934297517104737e-05, "loss": 1.8789, "step": 273},
    {"epoch": 0.14, "learning_rate": 1.9336917711018227e-05, "loss": 1.5244, "step": 274},
    {"epoch": 0.14, "learning_rate": 1.9330833412854035e-05, "loss": 1.4014, "step": 275},
    {"epoch": 0.14, "learning_rate": 1.932472229404356e-05, "loss": 1.1035, "step": 276},
    {"epoch": 0.14, "learning_rate": 1.9318584372152668e-05, "loss": 1.1802, "step": 277},
    {"epoch": 0.15, "learning_rate": 1.9312419664824268e-05, "loss": 1.4917, "step": 278},
    {"epoch": 0.15, "learning_rate": 1.9306228189778255e-05, "loss": 1.3477, "step": 279},
    {"epoch": 0.15, "learning_rate": 1.9300009964811475e-05, "loss": 1.7637, "step": 280},
    {"epoch": 0.15, "learning_rate": 1.9293765007797662e-05, "loss": 1.4507, "step": 281},
    {"epoch": 0.15, "learning_rate": 1.928749333668738e-05, "loss": 1.1709, "step": 282},
    {"epoch": 0.15, "learning_rate": 1.928119496950799e-05, "loss": 1.7305, "step": 283},
    {"epoch": 0.15, "learning_rate": 1.9274869924363585e-05, "loss": 1.3074, "step": 284},
    {"epoch": 0.15, "learning_rate": 1.9268518219434943e-05, "loss": 1.1116, "step": 285},
    {"epoch": 0.15, "learning_rate": 1.9262139872979474e-05, "loss": 1.8936, "step": 286},
    {"epoch": 0.15, "learning_rate": 1.9255734903331157e-05, "loss": 1.5659, "step": 287},
    {"epoch": 0.15, "learning_rate": 1.9249303328900505e-05, "loss": 1.3179, "step": 288},
    {"epoch": 0.15, "learning_rate": 1.9242845168174514e-05, "loss": 1.4568, "step": 289},
    {"epoch": 0.15, "learning_rate": 1.923636043971658e-05, "loss": 1.7562, "step": 290},
    {"epoch": 0.15, "learning_rate": 1.9229849162166478e-05, "loss": 1.6768, "step": 291},
    {"epoch": 0.15, "learning_rate": 1.922331135424029e-05, "loss": 1.2554, "step": 292},
    {"epoch": 0.15, "learning_rate": 1.921674703473037e-05, "loss": 1.8506, "step": 293},
    {"epoch": 0.15, "learning_rate": 1.921015622250526e-05, "loss": 1.6934, "step": 294},
    {"epoch": 0.15, "learning_rate": 1.920353893650966e-05, "loss": 1.7227, "step": 295},
    {"epoch": 0.15, "learning_rate": 1.9196895195764363e-05, "loss": 1.0075, "step": 296},
    {"epoch": 0.16, "learning_rate": 1.919022501936622e-05, "loss": 1.4814, "step": 297},
    {"epoch": 0.16, "learning_rate": 1.918352842648804e-05, "loss": 1.6152, "step": 298},
    {"epoch": 0.16, "learning_rate": 1.9176805436378597e-05, "loss": 2.1514, "step": 299},
    {"epoch": 0.16, "learning_rate": 1.9170056068362516e-05, "loss": 1.5288, "step": 300},
    {"epoch": 0.16, "learning_rate": 1.9163280341840253e-05, "loss": 1.666, "step": 301},
    {"epoch": 0.16, "learning_rate": 1.9156478276288027e-05, "loss": 1.6278, "step": 302},
    {"epoch": 0.16, "learning_rate": 1.9149649891257772e-05, "loss": 1.6504, "step": 303},
    {"epoch": 0.16, "learning_rate": 1.9142795206377066e-05, "loss": 1.5586, "step": 304},
    {"epoch": 0.16, "learning_rate": 1.9135914241349088e-05, "loss": 1.5718, "step": 305},
    {"epoch": 0.16, "learning_rate": 1.9129007015952556e-05, "loss": 1.4395, "step": 306},
    {"epoch": 0.16, "learning_rate": 1.9122073550041675e-05, "loss": 1.8735, "step": 307},
    {"epoch": 0.16, "learning_rate": 1.911511386354607e-05, "loss": 1.6025, "step": 308},
    {"epoch": 0.16, "learning_rate": 1.9108127976470737e-05, "loss": 1.4951, "step": 309},
    {"epoch": 0.16, "learning_rate": 1.9101115908895985e-05, "loss": 1.0557, "step": 310},
    {"epoch": 0.16, "learning_rate": 1.9094077680977377e-05, "loss": 1.8306, "step": 311},
    {"epoch": 0.16, "learning_rate": 1.9087013312945668e-05, "loss": 1.1816, "step": 312},
    {"epoch": 0.16, "learning_rate": 1.907992282510675e-05, "loss": 1.3887, "step": 313},
    {"epoch": 0.16, "learning_rate": 1.90728062378416e-05, "loss": 1.3164, "step": 314},
    {"epoch": 0.16, "learning_rate": 1.906566357160621e-05, "loss": 1.123, "step": 315},
    {"epoch": 0.17, "learning_rate": 1.9058494846931538e-05, "loss": 1.5557, "step": 316},
    {"epoch": 0.17, "learning_rate": 1.9051300084423433e-05, "loss": 1.1377, "step": 317},
    {"epoch": 0.17, "learning_rate": 1.904407930476261e-05, "loss": 1.5459, "step": 318},
    {"epoch": 0.17, "learning_rate": 1.9036832528704547e-05, "loss": 1.3032, "step": 319},
    {"epoch": 0.17, "learning_rate": 1.902955977707945e-05, "loss": 1.665, "step": 320},
    {"epoch": 0.17, "learning_rate": 1.9022261070792197e-05, "loss": 1.6816, "step": 321},
    {"epoch": 0.17, "learning_rate": 1.901493643082227e-05, "loss": 1.8926, "step": 322},
    {"epoch": 0.17, "learning_rate": 1.900758587822369e-05, "loss": 1.751, "step": 323},
    {"epoch": 0.17, "learning_rate": 1.900020943412496e-05, "loss": 1.3843, "step": 324},
    {"epoch": 0.17, "learning_rate": 1.8992807119729013e-05, "loss": 1.8936, "step": 325},
    {"epoch": 0.17, "learning_rate": 1.8985378956313133e-05, "loss": 1.709, "step": 326},
    {"epoch": 0.17, "learning_rate": 1.8977924965228923e-05, "loss": 1.4668, "step": 327},
    {"epoch": 0.17, "learning_rate": 1.89704451679022e-05, "loss": 1.4849, "step": 328},
    {"epoch": 0.17, "learning_rate": 1.8962939585832985e-05, "loss": 1.5342, "step": 329},
    {"epoch": 0.17, "learning_rate": 1.8955408240595396e-05, "loss": 1.2207, "step": 330},
    {"epoch": 0.17, "learning_rate": 1.894785115383761e-05, "loss": 1.7241, "step": 331},
    {"epoch": 0.17, "learning_rate": 1.8940268347281802e-05, "loss": 1.8047, "step": 332},
    {"epoch": 0.17, "learning_rate": 1.8932659842724067e-05, "loss": 1.4448, "step": 333},
    {"epoch": 0.17, "learning_rate": 1.8925025662034376e-05, "loss": 1.6621, "step": 334},
    {"epoch": 0.18, "learning_rate": 1.8917365827156493e-05, "loss": 1.627, "step": 335},
    {"epoch": 0.18, "learning_rate": 1.890968036010793e-05, "loss": 1.5327, "step": 336},
    {"epoch": 0.18, "learning_rate": 1.8901969282979882e-05, "loss": 1.6558, "step": 337},
    {"epoch": 0.18, "learning_rate": 1.889423261793714e-05, "loss": 1.3013, "step": 338},
    {"epoch": 0.18, "learning_rate": 1.888647038721806e-05, "loss": 1.7744, "step": 339},
    {"epoch": 0.18, "learning_rate": 1.8878682613134486e-05, "loss": 1.4575, "step": 340},
    {"epoch": 0.18, "learning_rate": 1.8870869318071667e-05, "loss": 1.1382, "step": 341},
    {"epoch": 0.18, "learning_rate": 1.886303052448823e-05, "loss": 1.3679, "step": 342},
    {"epoch": 0.18, "learning_rate": 1.8855166254916082e-05, "loss": 1.3848, "step": 343},
    {"epoch": 0.18, "learning_rate": 1.8847276531960363e-05, "loss": 1.6313, "step": 344},
    {"epoch": 0.18, "learning_rate": 1.8839361378299376e-05, "loss": 1.6836, "step": 345},
    {"epoch": 0.18, "learning_rate": 1.8831420816684528e-05, "loss": 1.7178, "step": 346},
    {"epoch": 0.18, "learning_rate": 1.8823454869940243e-05, "loss": 1.8096, "step": 347},
    {"epoch": 0.18, "learning_rate": 1.8815463560963932e-05, "loss": 1.8037, "step": 348},
    {"epoch": 0.18, "learning_rate": 1.8807446912725893e-05, "loss": 1.6787, "step": 349},
    {"epoch": 0.18, "learning_rate": 1.8799404948269267e-05, "loss": 1.4805, "step": 350},
    {"epoch": 0.18, "learning_rate": 1.879133769070996e-05, "loss": 1.5674, "step": 351},
    {"epoch": 0.18, "learning_rate": 1.878324516323658e-05, "loss": 1.9429, "step": 352},
    {"epoch": 0.18, "learning_rate": 1.877512738911038e-05, "loss": 1.6042, "step": 353},
    {"epoch": 0.19, "learning_rate": 1.8766984391665173e-05, "loss": 1.5635, "step": 354},
    {"epoch": 0.19, "learning_rate": 1.8758816194307272e-05, "loss": 1.3354, "step": 355},
    {"epoch": 0.19, "learning_rate": 1.875062282051544e-05, "loss": 1.7881, "step": 356},
    {"epoch": 0.19, "learning_rate": 1.8742404293840784e-05, "loss": 0.9346, "step": 357},
    {"epoch": 0.19, "learning_rate": 1.8734160637906738e-05, "loss": 1.7529, "step": 358},
    {"epoch": 0.19, "learning_rate": 1.8725891876408946e-05, "loss": 1.5811, "step": 359},
    {"epoch": 0.19, "learning_rate": 1.8717598033115224e-05, "loss": 1.1401, "step": 360},
    {"epoch": 0.19, "learning_rate": 1.8709279131865486e-05, "loss": 1.3574, "step": 361},
    {"epoch": 0.19, "learning_rate": 1.8700935196571666e-05, "loss": 1.2129, "step": 362},
    {"epoch": 0.19, "learning_rate": 1.8692566251217665e-05, "loss": 1.8516, "step": 363},
    {"epoch": 0.19, "learning_rate": 1.8684172319859258e-05, "loss": 1.5688, "step": 364},
    {"epoch": 0.19, "learning_rate": 1.867575342662406e-05, "loss": 1.5005, "step": 365},
    {"epoch": 0.19, "learning_rate": 1.8667309595711426e-05, "loss": 1.5867, "step": 366},
    {"epoch": 0.19, "learning_rate": 1.8658840851392386e-05, "loss": 1.1108, "step": 367},
    {"epoch": 0.19, "learning_rate": 1.8650347218009596e-05, "loss": 1.0679, "step": 368},
    {"epoch": 0.19, "learning_rate": 1.8641828719977244e-05, "loss": 0.9811, "step": 369},
    {"epoch": 0.19, "learning_rate": 1.8633285381780987e-05, "loss": 1.2744, "step": 370},
    {"epoch": 0.19, "learning_rate": 1.862471722797789e-05, "loss": 1.6221, "step": 371},
    {"epoch": 0.19, "learning_rate": 1.861612428319634e-05, "loss": 1.5977, "step": 372},
    {"epoch": 0.2, "learning_rate": 1.8607506572135994e-05, "loss": 1.332, "step": 373},
    {"epoch": 0.2, "learning_rate": 1.8598864119567693e-05, "loss": 1.2905, "step": 374},
    {"epoch": 0.2, "learning_rate": 1.859019695033339e-05, "loss": 1.1235, "step": 375},
    {"epoch": 0.2, "learning_rate": 1.858150508934609e-05, "loss": 1.1279, "step": 376},
    {"epoch": 0.2, "learning_rate": 1.8572788561589766e-05, "loss": 1.1641, "step": 377},
    {"epoch": 0.2, "learning_rate": 1.8564047392119306e-05, "loss": 1.4949, "step": 378},
    {"epoch": 0.2, "learning_rate": 1.855528160606041e-05, "loss": 1.4187, "step": 379},
    {"epoch": 0.2, "learning_rate": 1.8546491228609552e-05, "loss": 1.2559, "step": 380},
    {"epoch": 0.2, "learning_rate": 1.8537676285033886e-05, "loss": 1.6514, "step": 381},
    {"epoch": 0.2, "learning_rate": 1.852883680067118e-05, "loss": 1.7793, "step": 382},
    {"epoch": 0.2, "learning_rate": 1.851997280092974e-05, "loss": 1.459, "step": 383},
    {"epoch": 0.2, "learning_rate": 1.8511084311288338e-05, "loss": 1.6787, "step": 384},
    {"epoch": 0.2, "learning_rate": 1.8502171357296144e-05, "loss": 1.5303, "step": 385},
    {"epoch": 0.2, "learning_rate": 1.8493233964572648e-05, "loss": 1.7461, "step": 386},
    {"epoch": 0.2, "learning_rate": 1.8484272158807587e-05, "loss": 1.5869, "step": 387},
    {"epoch": 0.2, "learning_rate": 1.847528596576087e-05, "loss": 1.7012, "step": 388},
    {"epoch": 0.2, "learning_rate": 1.8466275411262504e-05, "loss": 1.6045, "step": 389},
    {"epoch": 0.2, "learning_rate": 1.8457240521212525e-05, "loss": 1.6836, "step": 390},
    {"epoch": 0.2, "learning_rate": 1.8448181321580916e-05, "loss": 1.2324, "step": 391},
    {"epoch": 0.21, "learning_rate": 1.8439097838407532e-05, "loss": 1.6055, "step": 392},
    {"epoch": 0.21, "learning_rate": 1.8429990097802037e-05, "loss": 0.9858, "step": 393},
    {"epoch": 0.21, "learning_rate": 1.8420858125943818e-05, "loss": 1.3906, "step": 394},
    {"epoch": 0.21, "learning_rate": 1.8411701949081906e-05, "loss": 1.6992, "step": 395},
    {"epoch": 0.21, "learning_rate": 1.840252159353492e-05, "loss": 1.3896, "step": 396},
    {"epoch": 0.21, "learning_rate": 1.8393317085690964e-05, "loss": 1.6509, "step": 397},
    {"epoch": 0.21, "learning_rate": 1.838408845200758e-05, "loss": 1.6553, "step": 398},
    {"epoch": 0.21, "learning_rate": 1.837483571901164e-05, "loss": 1.6118, "step": 399},
    {"epoch": 0.21, "learning_rate": 1.8365558913299307e-05, "loss": 1.6045, "step": 400},
    {"epoch": 0.21, "learning_rate": 1.835625806153593e-05, "loss": 1.168, "step": 401},
    {"epoch": 0.21, "learning_rate": 1.834693319045597e-05, "loss": 1.4414, "step": 402},
    {"epoch": 0.21, "learning_rate": 1.8337584326862935e-05, "loss": 1.0396, "step": 403},
    {"epoch": 0.21, "learning_rate": 1.8328211497629304e-05, "loss": 1.666, "step": 404},
    {"epoch": 0.21, "learning_rate": 1.8318814729696427e-05, "loss": 1.292, "step": 405},
    {"epoch": 0.21, "learning_rate": 1.8309394050074482e-05, "loss": 1.2471, "step": 406},
    {"epoch": 0.21, "learning_rate": 1.8299949485842366e-05, "loss": 1.5879, "step": 407},
    {"epoch": 0.21, "learning_rate": 1.8290481064147637e-05, "loss": 1.709, "step": 408},
    {"epoch": 0.21, "learning_rate": 1.828098881220642e-05, "loss": 1.6855, "step": 409},
    {"epoch": 0.21, "learning_rate": 1.8271472757303352e-05, "loss": 1.3252, "step": 410},
    {"epoch": 0.22, "learning_rate": 1.8261932926791477e-05, "loss": 1.6582, "step": 411},
    {"epoch": 0.22, "learning_rate": 1.8252369348092184e-05, "loss": 1.5156, "step": 412},
    {"epoch": 0.22, "learning_rate": 1.824278204869513e-05, "loss": 1.5378, "step": 413},
    {"epoch": 0.22, "learning_rate": 1.823317105615815e-05, "loss": 0.9766, "step": 414},
    {"epoch": 0.22, "learning_rate": 1.8223536398107177e-05, "loss": 1.1362, "step": 415},
    {"epoch": 0.22, "learning_rate": 1.8213878102236177e-05, "loss": 1.4385, "step": 416},
    {"epoch": 0.22, "learning_rate": 1.8204196196307058e-05, "loss": 1.6162, "step": 417},
    {"epoch": 0.22, "learning_rate": 1.819449070814959e-05, "loss": 1.4419, "step": 418},
    {"epoch": 0.22, "learning_rate": 1.8184761665661335e-05, "loss": 1.5757, "step": 419},
    {"epoch": 0.22, "learning_rate": 1.8175009096807557e-05, "loss": 1.5146, "step": 420},
    {"epoch": 0.22, "learning_rate": 1.8165233029621136e-05, "loss": 1.5449, "step": 421},
    {"epoch": 0.22, "learning_rate": 1.8155433492202507e-05, "loss": 1.6187, "step": 422},
    {"epoch": 0.22, "learning_rate": 1.8145610512719565e-05, "loss": 1.2803, "step": 423},
    {"epoch": 0.22, "learning_rate": 1.8135764119407585e-05, "loss": 1.3052, "step": 424},
    {"epoch": 0.22, "learning_rate": 1.8125894340569145e-05, "loss": 1.6846, "step": 425},
    {"epoch": 0.22, "learning_rate": 1.8116001204574042e-05, "loss": 1.8115, "step": 426},
    {"epoch": 0.22, "learning_rate": 1.8106084739859206e-05, "loss": 1.4536, "step": 427},
    {"epoch": 0.22, "learning_rate": 1.8096144974928635e-05, "loss": 1.7686, "step": 428},
    {"epoch": 0.22, "learning_rate": 1.808618193835329e-05, "loss": 1.7617, "step": 429},
    {"epoch": 0.23, "learning_rate": 1.8076195658771033e-05, "loss": 1.1416, "step": 430},
    {"epoch": 0.23, "learning_rate": 1.8066186164886525e-05, "loss": 1.0762, "step": 431},
    {"epoch": 0.23, "learning_rate": 1.8056153485471167e-05, "loss": 1.4985, "step": 432},
    {"epoch": 0.23, "learning_rate": 1.8046097649362994e-05, "loss": 1.449, "step": 433},
    {"epoch": 0.23, "learning_rate": 1.8036018685466614e-05, "loss": 1.6416, "step": 434},
    {"epoch": 0.23, "learning_rate": 1.80259166227531e-05, "loss": 1.7617, "step": 435},
    {"epoch": 0.23, "learning_rate": 1.8015791490259932e-05, "loss": 1.8594, "step": 436},
    {"epoch": 0.23, "learning_rate": 1.80056433170909e-05, "loss": 1.1938, "step": 437},
    {"epoch": 0.23, "learning_rate": 1.799547213241602e-05, "loss": 1.0913, "step": 438},
    {"epoch": 0.23, "learning_rate": 1.798527796547145e-05, "loss": 1.229, "step": 439},
    {"epoch": 0.23, "learning_rate": 1.7975060845559412e-05, "loss": 1.3936, "step": 440},
    {"epoch": 0.23, "learning_rate": 1.79648208020481e-05, "loss": 1.4009, "step": 441},
    {"epoch": 0.23, "learning_rate": 1.7954557864371615e-05, "loss": 1.7529, "step": 442},
    {"epoch": 0.23, "learning_rate": 1.794427206202984e-05, "loss": 1.6252, "step": 443},
    {"epoch": 0.23, "learning_rate": 1.79339634245884e-05, "loss": 1.7422, "step": 444},
    {"epoch": 0.23, "learning_rate": 1.7923631981678552e-05, "loss": 1.7744, "step": 445},
    {"epoch": 0.23, "learning_rate": 1.7913277762997105e-05, "loss": 1.6572, "step": 446},
    {"epoch": 0.23, "learning_rate": 1.7902900798306334e-05, "loss": 1.7285, "step": 447},
    {"epoch": 0.23, "learning_rate": 1.7892501117433896e-05, "loss": 1.4902, "step": 448},
    {"epoch": 0.23, "learning_rate": 1.788207875027274e-05, "loss": 1.8623, "step": 449},
    {"epoch": 0.24, "learning_rate": 1.7871633726781042e-05, "loss": 0.8735, "step": 450},
    {"epoch": 0.24, "learning_rate": 1.786116607698207e-05, "loss": 1.8496, "step": 451},
    {"epoch": 0.24, "learning_rate": 1.785067583096416e-05, "loss": 1.4072, "step": 452},
    {"epoch": 0.24, "learning_rate": 1.784016301888058e-05, "loss": 1.4888, "step": 453},
    {"epoch": 0.24, "learning_rate": 1.7829627670949468e-05, "loss": 1.4424, "step": 454},
    {"epoch": 0.24, "learning_rate": 1.7819069817453746e-05, "loss": 1.8037, "step": 455},
    {"epoch": 0.24, "learning_rate": 1.7808489488741013e-05, "loss": 1.4629, "step": 456},
    {"epoch": 0.24, "learning_rate": 1.779788671522348e-05, "loss": 1.5479, "step": 457},
    {"epoch": 0.24, "learning_rate": 1.778726152737787e-05, "loss": 1.1143, "step": 458},
    {"epoch": 0.24, "learning_rate": 1.7776613955745337e-05, "loss": 1.5515, "step": 459},
    {"epoch": 0.24, "learning_rate": 1.776594403093138e-05, "loss": 1.6328, "step": 460},
    {"epoch": 0.24, "learning_rate": 1.7755251783605733e-05, "loss": 1.9775, "step": 461},
    {"epoch": 0.24, "learning_rate": 1.774453724450231e-05, "loss": 1.1982, "step": 462},
    {"epoch": 0.24, "learning_rate": 1.7733800444419088e-05, "loss": 1.1538, "step": 463},
    {"epoch": 0.24, "learning_rate": 1.7723041414218053e-05, "loss": 1.3623, "step": 464},
    {"epoch": 0.24, "learning_rate": 1.7712260184825063e-05, "loss": 1.1934, "step": 465},
    {"epoch": 0.24, "learning_rate": 1.7701456787229805e-05, "loss": 1.4868, "step": 466},
    {"epoch": 0.24, "learning_rate": 1.7690631252485676e-05, "loss": 1.3418, "step": 467},
    {"epoch": 0.24, "learning_rate": 1.767978361170971e-05, "loss": 1.564, "step": 468},
    {"epoch": 0.25, "learning_rate": 1.766891389608248e-05, "loss": 1.6118, "step": 469},
    {"epoch": 0.25, "learning_rate": 1.7658022136848008e-05, "loss": 1.4795, "step": 470},
    {"epoch": 0.25, "learning_rate": 1.7647108365313683e-05, "loss": 1.5483, "step": 471},
    {"epoch": 0.25, "learning_rate": 1.763617261285017e-05, "loss": 1.8125, "step": 472},
    {"epoch": 0.25, "learning_rate": 1.7625214910891307e-05, "loss": 1.3335, "step": 473},
    {"epoch": 0.25, "learning_rate": 1.761423529093403e-05, "loss": 1.9668, "step": 474},
    {"epoch": 0.25, "learning_rate": 1.7603233784538275e-05, "loss": 1.5422, "step": 475},
    {"epoch": 0.25, "learning_rate": 1.759221042332689e-05, "loss": 1.5212, "step": 476},
    {"epoch": 0.25, "learning_rate": 1.7581165238985533e-05, "loss": 1.6543, "step": 477},
    {"epoch": 0.25, "learning_rate": 1.7570098263262608e-05, "loss": 1.6338, "step": 478},
    {"epoch": 0.25, "learning_rate": 1.7559009527969145e-05, "loss": 1.5942, "step": 479},
    {"epoch": 0.25, "learning_rate": 1.754789906497872e-05, "loss": 1.5747, "step": 480},
    {"epoch": 0.25, "learning_rate": 1.7536766906227365e-05, "loss": 1.6245, "step": 481},
    {"epoch": 0.25, "learning_rate": 1.752561308371348e-05, "loss": 1.4292, "step": 482},
    {"epoch": 0.25, "learning_rate": 1.751443762949772e-05, "loss": 1.4087, "step": 483},
    {"epoch": 0.25, "learning_rate": 1.750324057570294e-05, "loss": 1.7461, "step": 484},
    {"epoch": 0.25, "learning_rate": 1.7492021954514068e-05, "loss": 1.2061, "step": 485},
    {"epoch": 0.25, "learning_rate": 1.748078179817802e-05, "loss": 1.5674, "step": 486},
    {"epoch": 0.25, "learning_rate": 1.7469520139003627e-05, "loss": 1.4792, "step": 487},
    {"epoch": 0.26, "learning_rate": 1.745823700936152e-05, "loss": 1.6392, "step": 488},
    {"epoch": 0.26, "learning_rate": 1.7446932441684044e-05, "loss": 1.6133, "step": 489},
    {"epoch": 0.26, "learning_rate": 1.743560646846517e-05, "loss": 1.1045, "step": 490},
    {"epoch": 0.26, "learning_rate": 1.7424259122260396e-05, "loss": 1.6724, "step": 491},
    {"epoch": 0.26, "learning_rate": 1.741289043568665e-05, "loss": 1.396, "step": 492},
    {"epoch": 0.26, "learning_rate": 1.740150044142221e-05, "loss": 1.7134, "step": 493},
    {"epoch": 0.26, "learning_rate": 1.7390089172206594e-05, "loss": 1.4453, "step": 494},
    {"epoch": 0.26, "learning_rate": 1.7378656660840475e-05, "loss": 1.8496, "step": 495},
    {"epoch": 0.26, "learning_rate": 1.7367202940185583e-05, "loss": 1.2969, "step": 496},
    {"epoch": 0.26, "learning_rate": 1.7355728043164613e-05, "loss": 1.7539, "step": 497},
    {"epoch": 0.26, "learning_rate": 1.7344232002761138e-05, "loss": 1.5869, "step": 498},
    {"epoch": 0.26, "learning_rate": 1.7332714852019487e-05, "loss": 1.334, "step": 499},
    {"epoch": 0.26, "learning_rate": 1.732117662404469e-05, "loss": 1.4746, "step": 500},
    {"epoch": 0.26, "learning_rate": 1.7309617352002343e-05, "loss": 1.7705, "step": 501},
    {"epoch": 0.26, "learning_rate": 1.7298037069118546e-05, "loss": 1.3179, "step": 502},
    {"epoch": 0.26, "learning_rate": 1.7286435808679787e-05, "loss": 1.3662, "step": 503},
    {"epoch": 0.26, "learning_rate": 1.7274813604032847e-05, "loss": 1.6641, "step": 504},
    {"epoch": 0.26, "learning_rate": 1.7263170488584717e-05, "loss": 0.8788, "step": 505},
    {"epoch": 0.26, "learning_rate": 1.725150649580249e-05, "loss": 1.75, "step": 506},
    {"epoch": 0.27, "learning_rate": 1.7239821659213272e-05, "loss": 1.7539, "step": 507},
    {"epoch": 0.27, "learning_rate": 1.7228116012404085e-05, "loss": 1.8027, "step": 508},
    {"epoch": 0.27, "learning_rate": 1.721638958902175e-05, "loss": 1.585, "step": 509},
    {"epoch": 0.27, "learning_rate": 1.7204642422772836e-05, "loss": 1.4321, "step": 510},
    {"epoch": 0.27, "learning_rate": 1.7192874547423514e-05, "loss": 1.502, "step": 511},
    {"epoch": 0.27, "learning_rate": 1.71810859967995e-05, "loss": 1.4385, "step": 512},
    {"epoch": 0.27, "learning_rate": 1.716927680478591e-05, "loss": 1.2754, "step": 513},
    {"epoch": 0.27, "learning_rate": 1.715744700532723e-05, "loss": 1.2314, "step": 514},
    {"epoch": 0.27, "learning_rate": 1.714559663242715e-05, "loss": 1.2578, "step": 515},
    {"epoch": 0.27, "learning_rate": 1.7133725720148513e-05, "loss": 1.2544, "step": 516},
    {"epoch": 0.27, "learning_rate": 1.712183430261319e-05, "loss": 1.1835, "step": 517},
    {"epoch": 0.27, "learning_rate": 1.7109922414001997e-05, "loss": 1.4375, "step": 518},
    {"epoch": 0.27, "learning_rate": 1.7097990088554595e-05, "loss": 1.3574, "step": 519},
    {"epoch": 0.27, "learning_rate": 1.7086037360569386e-05, "loss": 0.7559, "step": 520},
    {"epoch": 0.27, "learning_rate": 1.707406426440342e-05, "loss": 1.8052, "step": 521},
    {"epoch": 0.27, "learning_rate": 1.7062070834472287e-05, "loss": 1.3945, "step": 522},
    {"epoch": 0.27, "learning_rate": 1.7050057105250034e-05, "loss": 0.8877, "step": 523},
    {"epoch": 0.27, "learning_rate": 1.7038023111269045e-05, "loss": 1.6357, "step": 524},
    {"epoch": 0.27, "learning_rate": 1.7025968887119962e-05, "loss": 1.5205, "step": 525},
    {"epoch": 0.28, "learning_rate": 1.701389446745158e-05, "loss": 1.627, "step": 526},
    {"epoch": 0.28, "learning_rate": 1.7001799886970735e-05, "loss": 1.8467, "step": 527},
    {"epoch": 0.28, "learning_rate": 1.6989685180442212e-05, "loss": 1.0562, "step": 528},
    {"epoch": 0.28, "learning_rate": 1.697755038268866e-05, "loss": 1.2578, "step": 529},
    {"epoch": 0.28, "learning_rate": 1.696539552859046e-05, "loss": 1.7163, "step": 530},
    {"epoch": 0.28, "learning_rate": 1.6953220653085662e-05, "loss": 1.0232, "step": 531},
    {"epoch": 0.28, "learning_rate": 1.6941025791169855e-05, "loss": 1.1108, "step": 532},
    {"epoch": 0.28, "learning_rate": 1.692881097789608e-05, "loss": 1.2886, "step": 533},
    {"epoch": 0.28, "learning_rate": 1.691657624837472e-05, "loss": 1.4741, "step": 534},
    {"epoch": 0.28, "learning_rate": 1.6904321637773414e-05, "loss": 1.4961, "step": 535},
    {"epoch": 0.28, "learning_rate": 1.6892047181316952e-05, "loss": 1.0444, "step": 536},
    {"epoch": 0.28, "learning_rate": 1.687975291428715e-05, "loss": 1.5532, "step": 537
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6867438872022782e-05, |
|
"loss": 1.0918, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6855105089919467e-05, |
|
"loss": 1.3818, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.684275160342955e-05, |
|
"loss": 1.0767, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.683037844806203e-05, |
|
"loss": 1.603, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6817985659382437e-05, |
|
"loss": 1.166, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.680557327301273e-05, |
|
"loss": 1.1975, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.67931413246312e-05, |
|
"loss": 1.1372, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.678068984997238e-05, |
|
"loss": 1.4121, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6768218884826916e-05, |
|
"loss": 1.6565, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.675572846504148e-05, |
|
"loss": 1.5146, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.674321862651867e-05, |
|
"loss": 1.5928, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6730689405216895e-05, |
|
"loss": 1.7119, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6718140837150286e-05, |
|
"loss": 1.2321, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6705572958388576e-05, |
|
"loss": 1.1724, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.669298580505701e-05, |
|
"loss": 1.0166, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.668037941333623e-05, |
|
"loss": 1.688, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6667753819462196e-05, |
|
"loss": 1.0674, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.665510905972603e-05, |
|
"loss": 1.5879, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.664244517047398e-05, |
|
"loss": 0.9707, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6629762188107248e-05, |
|
"loss": 1.1733, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6617060149081944e-05, |
|
"loss": 1.3101, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6604339089908935e-05, |
|
"loss": 1.5615, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6591599047153778e-05, |
|
"loss": 1.4438, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.657884005743658e-05, |
|
"loss": 1.5654, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6566062157431915e-05, |
|
"loss": 1.4775, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.655326538386872e-05, |
|
"loss": 1.3428, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6540449773530168e-05, |
|
"loss": 1.5181, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.652761536325359e-05, |
|
"loss": 1.0115, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6514762189930353e-05, |
|
"loss": 1.7163, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.650189029050575e-05, |
|
"loss": 1.6455, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6488999701978905e-05, |
|
"loss": 1.5811, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6476090461402664e-05, |
|
"loss": 1.1797, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6463162605883485e-05, |
|
"loss": 1.5205, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.645021617258133e-05, |
|
"loss": 1.1045, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6437251198709567e-05, |
|
"loss": 1.0125, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6424267721534856e-05, |
|
"loss": 1.4424, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.641126577837703e-05, |
|
"loss": 1.6807, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6398245406609027e-05, |
|
"loss": 1.4336, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.638520664365673e-05, |
|
"loss": 1.7842, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6372149526998905e-05, |
|
"loss": 1.6982, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.635907409416706e-05, |
|
"loss": 1.0474, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.634598038274536e-05, |
|
"loss": 1.8516, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6332868430370508e-05, |
|
"loss": 1.626, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.631973827473164e-05, |
|
"loss": 1.0522, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6306589953570207e-05, |
|
"loss": 1.5664, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6293423504679898e-05, |
|
"loss": 1.3301, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6280238965906474e-05, |
|
"loss": 1.4199, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6267036375147728e-05, |
|
"loss": 1.6855, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6253815770353316e-05, |
|
"loss": 1.5957, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6240577189524688e-05, |
|
"loss": 1.7695, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6227320670714966e-05, |
|
"loss": 1.1191, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6214046252028823e-05, |
|
"loss": 1.6113, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6200753971622387e-05, |
|
"loss": 1.2463, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.618744386770313e-05, |
|
"loss": 1.4434, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.617411597852976e-05, |
|
"loss": 1.3906, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6160770342412097e-05, |
|
"loss": 1.6367, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.614740699771098e-05, |
|
"loss": 1.5615, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6134025982838153e-05, |
|
"loss": 1.7871, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6120627336256142e-05, |
|
"loss": 1.1763, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.610721109647816e-05, |
|
"loss": 1.5293, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.609377730206799e-05, |
|
"loss": 1.4556, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6080325991639866e-05, |
|
"loss": 1.8984, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6066857203858387e-05, |
|
"loss": 1.4844, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.605337097743837e-05, |
|
"loss": 1.293, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6039867351144778e-05, |
|
"loss": 1.834, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6026346363792565e-05, |
|
"loss": 1.3242, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.6012808054246608e-05, |
|
"loss": 1.7012, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5999252461421564e-05, |
|
"loss": 1.6201, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5985679624281772e-05, |
|
"loss": 1.5669, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.597208958184114e-05, |
|
"loss": 1.6226, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.595848237316303e-05, |
|
"loss": 1.4126, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5944858037360145e-05, |
|
"loss": 1.5977, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5931216613594426e-05, |
|
"loss": 1.2842, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5917558141076915e-05, |
|
"loss": 1.4741, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.590388265906768e-05, |
|
"loss": 1.7705, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5890190206875663e-05, |
|
"loss": 1.7461, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.58764808238586e-05, |
|
"loss": 1.6201, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5862754549422884e-05, |
|
"loss": 1.4697, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5849011423023463e-05, |
|
"loss": 1.6387, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5835251484163728e-05, |
|
"loss": 1.6191, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5821474772395385e-05, |
|
"loss": 1.5635, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5807681327318372e-05, |
|
"loss": 1.6963, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5793871188580704e-05, |
|
"loss": 1.5742, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.578004439587839e-05, |
|
"loss": 1.6318, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5766200988955315e-05, |
|
"loss": 1.469, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.575234100760311e-05, |
|
"loss": 1.3774, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5738464491661054e-05, |
|
"loss": 1.2677, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5724571481015953e-05, |
|
"loss": 1.1133, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5710662015602016e-05, |
|
"loss": 1.6191, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5696736135400765e-05, |
|
"loss": 1.042, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5682793880440895e-05, |
|
"loss": 1.624, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.566883529079817e-05, |
|
"loss": 1.625, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5654860406595312e-05, |
|
"loss": 1.291, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5640869268001875e-05, |
|
"loss": 1.3999, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.562686191523413e-05, |
|
"loss": 1.7725, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5612838388554966e-05, |
|
"loss": 1.4971, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5598798728273753e-05, |
|
"loss": 1.4702, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5584742974746242e-05, |
|
"loss": 1.71, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.557067116837444e-05, |
|
"loss": 0.9536, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5556583349606492e-05, |
|
"loss": 1.3076, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5542479558936572e-05, |
|
"loss": 1.6582, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5528359836904774e-05, |
|
"loss": 1.5371, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.551422422409697e-05, |
|
"loss": 1.6641, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5500072761144712e-05, |
|
"loss": 1.5273, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5485905488725126e-05, |
|
"loss": 1.7495, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5471722447560757e-05, |
|
"loss": 1.6455, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5457523678419498e-05, |
|
"loss": 0.9026, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5443309222114434e-05, |
|
"loss": 1.4551, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5429079119503752e-05, |
|
"loss": 1.2505, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5414833411490612e-05, |
|
"loss": 1.165, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5400572139023026e-05, |
|
"loss": 1.1001, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5386295343093743e-05, |
|
"loss": 1.2588, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.537200306474014e-05, |
|
"loss": 1.6746, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5357695345044096e-05, |
|
"loss": 1.3276, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5343372225131863e-05, |
|
"loss": 1.6641, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5329033746173975e-05, |
|
"loss": 1.6729, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5314679949385105e-05, |
|
"loss": 1.583, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.530031087602396e-05, |
|
"loss": 1.5801, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5285926567393158e-05, |
|
"loss": 1.4902, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5271527064839107e-05, |
|
"loss": 1.8047, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5257112409751886e-05, |
|
"loss": 1.4009, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5242682643565135e-05, |
|
"loss": 1.7168, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5228237807755925e-05, |
|
"loss": 1.1797, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5213777943844648e-05, |
|
"loss": 0.9973, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5199303093394883e-05, |
|
"loss": 1.3062, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5184813298013304e-05, |
|
"loss": 1.7539, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5170308599349524e-05, |
|
"loss": 1.7422, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5155789039096005e-05, |
|
"loss": 1.5481, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5141254658987922e-05, |
|
"loss": 1.4243, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5126705500803055e-05, |
|
"loss": 1.5732, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5112141606361657e-05, |
|
"loss": 1.7812, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5097563017526336e-05, |
|
"loss": 1.6602, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5082969776201948e-05, |
|
"loss": 1.4185, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5068361924335457e-05, |
|
"loss": 1.3149, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.505373950391583e-05, |
|
"loss": 1.1836, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5039102556973899e-05, |
|
"loss": 1.0845, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5024451125582271e-05, |
|
"loss": 1.6191, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5009785251855164e-05, |
|
"loss": 1.6055, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4995104977948331e-05, |
|
"loss": 1.4253, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.49804103460589e-05, |
|
"loss": 1.3594, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4965701398425275e-05, |
|
"loss": 1.0459, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4950978177327016e-05, |
|
"loss": 1.2666, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4936240725084701e-05, |
|
"loss": 1.7461, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4921489084059823e-05, |
|
"loss": 1.4785, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4906723296654652e-05, |
|
"loss": 1.6719, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4891943405312126e-05, |
|
"loss": 1.144, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4877149452515718e-05, |
|
"loss": 0.9712, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4862341480789322e-05, |
|
"loss": 1.0283, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.484751953269713e-05, |
|
"loss": 1.5967, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.483268365084351e-05, |
|
"loss": 1.5159, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.481783387787287e-05, |
|
"loss": 1.7822, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4802970256469564e-05, |
|
"loss": 1.0854, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4788092829357736e-05, |
|
"loss": 1.6226, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4773201639301223e-05, |
|
"loss": 1.7637, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4758296729103413e-05, |
|
"loss": 1.793, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4743378141607146e-05, |
|
"loss": 1.1528, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4728445919694563e-05, |
|
"loss": 0.9468, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4713500106287e-05, |
|
"loss": 1.0693, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4698540744344864e-05, |
|
"loss": 1.5283, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4683567876867503e-05, |
|
"loss": 0.8623, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4668581546893086e-05, |
|
"loss": 1.3296, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4653581797498477e-05, |
|
"loss": 1.0474, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4638568671799113e-05, |
|
"loss": 1.7026, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4623542212948887e-05, |
|
"loss": 1.5596, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4608502464140002e-05, |
|
"loss": 1.1543, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4593449468602878e-05, |
|
"loss": 1.2422, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4578383269606004e-05, |
|
"loss": 1.5244, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4563303910455821e-05, |
|
"loss": 1.6787, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4548211434496598e-05, |
|
"loss": 1.5488, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4533105885110307e-05, |
|
"loss": 1.0776, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4517987305716502e-05, |
|
"loss": 1.2285, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4502855739772184e-05, |
|
"loss": 1.0649, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4487711230771686e-05, |
|
"loss": 1.6406, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4472553822246548e-05, |
|
"loss": 1.3579, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4457383557765385e-05, |
|
"loss": 1.1411, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4442200480933761e-05, |
|
"loss": 1.1094, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.442700463539408e-05, |
|
"loss": 1.0181, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4411796064825436e-05, |
|
"loss": 0.9143, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.439657481294351e-05, |
|
"loss": 1.0385, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4381340923500422e-05, |
|
"loss": 1.6094, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4366094440284632e-05, |
|
"loss": 1.1348, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.435083540712079e-05, |
|
"loss": 1.4775, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4335563867869618e-05, |
|
"loss": 1.3032, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4320279866427798e-05, |
|
"loss": 1.4102, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.430498344672782e-05, |
|
"loss": 1.666, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4289674652737877e-05, |
|
"loss": 1.397, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4274353528461728e-05, |
|
"loss": 1.7373, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4259020117938574e-05, |
|
"loss": 1.0601, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4243674465242934e-05, |
|
"loss": 1.1255, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4228316614484511e-05, |
|
"loss": 1.4558, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4212946609808079e-05, |
|
"loss": 1.5928, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4197564495393334e-05, |
|
"loss": 1.6543, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4182170315454798e-05, |
|
"loss": 1.0896, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4166764114241657e-05, |
|
"loss": 1.3135, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4151345936037664e-05, |
|
"loss": 1.1094, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.413591582516099e-05, |
|
"loss": 1.1597, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4120473825964112e-05, |
|
"loss": 0.9634, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4105019982833674e-05, |
|
"loss": 1.1577, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4089554340190365e-05, |
|
"loss": 1.7778, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4074076942488794e-05, |
|
"loss": 1.6846, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4058587834217356e-05, |
|
"loss": 1.415, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.404308705989811e-05, |
|
"loss": 0.9971, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4027574664086641e-05, |
|
"loss": 1.3311, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4012050691371948e-05, |
|
"loss": 1.791, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3996515186376298e-05, |
|
"loss": 1.877, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3980968193755115e-05, |
|
"loss": 1.584, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3965409758196838e-05, |
|
"loss": 1.79, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3949839924422798e-05, |
|
"loss": 1.8086, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3934258737187087e-05, |
|
"loss": 0.8597, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3918666241276442e-05, |
|
"loss": 1.2046, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.390306248151009e-05, |
|
"loss": 1.2397, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3887447502739649e-05, |
|
"loss": 1.5557, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3871821349848977e-05, |
|
"loss": 1.1875, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3856184067754053e-05, |
|
"loss": 1.9082, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3840535701402847e-05, |
|
"loss": 1.2056, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3824876295775187e-05, |
|
"loss": 1.4102, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3809205895882635e-05, |
|
"loss": 1.2913, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3793524546768358e-05, |
|
"loss": 1.4082, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3777832293506985e-05, |
|
"loss": 1.5161, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3762129181204502e-05, |
|
"loss": 1.376, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3746415254998095e-05, |
|
"loss": 1.4937, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3730690560056043e-05, |
|
"loss": 1.5908, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3714955141577573e-05, |
|
"loss": 1.4888, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3699209044792743e-05, |
|
"loss": 1.4258, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3683452314962295e-05, |
|
"loss": 1.3394, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3667684997377542e-05, |
|
"loss": 1.5742, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.365190713736023e-05, |
|
"loss": 1.5703, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3636118780262405e-05, |
|
"loss": 1.4375, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3620319971466288e-05, |
|
"loss": 1.1797, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3604510756384141e-05, |
|
"loss": 1.5332, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3588691180458143e-05, |
|
"loss": 1.1279, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3572861289160245e-05, |
|
"loss": 1.0874, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3557021127992055e-05, |
|
"loss": 1.6123, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3541170742484705e-05, |
|
"loss": 1.6216, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3525310178198707e-05, |
|
"loss": 1.5615, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3509439480723835e-05, |
|
"loss": 1.0244, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3493558695678992e-05, |
|
"loss": 1.1709, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.347766786871207e-05, |
|
"loss": 1.6758, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3461767045499834e-05, |
|
"loss": 1.3013, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3445856271747777e-05, |
|
"loss": 1.5396, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3429935593189996e-05, |
|
"loss": 1.7061, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3414005055589057e-05, |
|
"loss": 1.2766, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3398064704735862e-05, |
|
"loss": 1.5396, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3382114586449533e-05, |
|
"loss": 1.5503, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3366154746577254e-05, |
|
"loss": 1.377, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3350185230994157e-05, |
|
"loss": 1.064, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3334206085603186e-05, |
|
"loss": 0.9751, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3318217356334967e-05, |
|
"loss": 1.7109, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3302219089147675e-05, |
|
"loss": 1.6152, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3286211330026894e-05, |
|
"loss": 0.9763, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3270194124985499e-05, |
|
"loss": 1.1953, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.325416752006351e-05, |
|
"loss": 1.7783, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3238131561327976e-05, |
|
"loss": 1.6875, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3222086294872818e-05, |
|
"loss": 0.9619, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.320603176681873e-05, |
|
"loss": 1.751, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.318996802331301e-05, |
|
"loss": 1.1965, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3173895110529454e-05, |
|
"loss": 1.4597, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3157813074668213e-05, |
|
"loss": 1.6191, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3141721961955658e-05, |
|
"loss": 1.3184, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3125621818644258e-05, |
|
"loss": 1.3213, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3109512691012434e-05, |
|
"loss": 1.7422, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3093394625364433e-05, |
|
"loss": 1.6865, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3077267668030194e-05, |
|
"loss": 1.2798, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3061131865365211e-05, |
|
"loss": 1.5044, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3044987263750409e-05, |
|
"loss": 1.5137, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3028833909591999e-05, |
|
"loss": 1.8379, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3012671849321351e-05, |
|
"loss": 1.4248, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2996501129394872e-05, |
|
"loss": 1.583, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2980321796293838e-05, |
|
"loss": 1.3607, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2964133896524305e-05, |
|
"loss": 1.1348, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2947937476616938e-05, |
|
"loss": 1.0129, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.29317325831269e-05, |
|
"loss": 1.6416, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2915519262633704e-05, |
|
"loss": 1.5244, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2899297561741097e-05, |
|
"loss": 1.3252, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2883067527076904e-05, |
|
"loss": 1.4194, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.286682920529291e-05, |
|
"loss": 1.3179, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2850582643064713e-05, |
|
"loss": 1.5791, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2834327887091611e-05, |
|
"loss": 1.1172, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2818064984096443e-05, |
|
"loss": 1.1431, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.280179398082547e-05, |
|
"loss": 1.5654, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2785514924048235e-05, |
|
"loss": 1.8135, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2769227860557431e-05, |
|
"loss": 1.8877, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2752932837168768e-05, |
|
"loss": 1.5815, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2736629900720832e-05, |
|
"loss": 1.2261, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2720319098074954e-05, |
|
"loss": 1.6338, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2704000476115079e-05, |
|
"loss": 1.5332, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.268767408174763e-05, |
|
"loss": 1.5574, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2671339961901366e-05, |
|
"loss": 1.1802, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.265499816352725e-05, |
|
"loss": 1.2949, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2638648733598325e-05, |
|
"loss": 1.5928, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2622291719109559e-05, |
|
"loss": 1.6592, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2605927167077736e-05, |
|
"loss": 1.3447, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2589555124541293e-05, |
|
"loss": 1.5735, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.25731756385602e-05, |
|
"loss": 1.6133, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.255678875621583e-05, |
|
"loss": 1.4575, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2540394524610808e-05, |
|
"loss": 0.8645, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.252399299086889e-05, |
|
"loss": 1.5134, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2507584202134812e-05, |
|
"loss": 1.0496, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2491168205574175e-05, |
|
"loss": 1.6445, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2474745048373292e-05, |
|
"loss": 1.0144, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2458314777739056e-05, |
|
"loss": 1.5771, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2441877440898817e-05, |
|
"loss": 1.6143, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2425433085100224e-05, |
|
"loss": 1.418, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2408981757611112e-05, |
|
"loss": 1.1021, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2392523505719349e-05, |
|
"loss": 1.4482, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.237605837673271e-05, |
|
"loss": 1.5659, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2359586417978733e-05, |
|
"loss": 1.2413, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2343107676804593e-05, |
|
"loss": 0.8834, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2326622200576956e-05, |
|
"loss": 1.5615, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2310130036681847e-05, |
|
"loss": 0.9961, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2293631232524524e-05, |
|
"loss": 1.6982, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2277125835529318e-05, |
|
"loss": 1.5449, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2260613893139517e-05, |
|
"loss": 1.1851, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2244095452817227e-05, |
|
"loss": 1.6323, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2227570562043223e-05, |
|
"loss": 1.3906, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.221103926831683e-05, |
|
"loss": 1.7168, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.219450161915577e-05, |
|
"loss": 1.4963, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2177957662096038e-05, |
|
"loss": 1.0092, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2161407444691761e-05, |
|
"loss": 1.4785, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2144851014515055e-05, |
|
"loss": 1.2271, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2128288419155903e-05, |
|
"loss": 1.2188, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2111719706222e-05, |
|
"loss": 1.043, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2095144923338633e-05, |
|
"loss": 1.4932, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2078564118148531e-05, |
|
"loss": 1.5688, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2061977338311737e-05, |
|
"loss": 1.064, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2045384631505466e-05, |
|
"loss": 1.749, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.202878604542397e-05, |
|
"loss": 1.7422, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.20121816277784e-05, |
|
"loss": 1.272, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1995571426296671e-05, |
|
"loss": 1.2671, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1978955488723323e-05, |
|
"loss": 1.4087, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1962333862819379e-05, |
|
"loss": 1.5044, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1945706596362216e-05, |
|
"loss": 1.5854, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1929073737145432e-05, |
|
"loss": 1.1011, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1912435332978684e-05, |
|
"loss": 0.9824, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1895791431687585e-05, |
|
"loss": 1.6123, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1879142081113535e-05, |
|
"loss": 1.5532, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1862487329113606e-05, |
|
"loss": 1.4365, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1845827223560394e-05, |
|
"loss": 1.1333, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1829161812341883e-05, |
|
"loss": 1.7139, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1812491143361305e-05, |
|
"loss": 1.0938, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1795815264537008e-05, |
|
"loss": 1.1064, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1779134223802315e-05, |
|
"loss": 1.665, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1762448069105387e-05, |
|
"loss": 1.0991, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1745756848409083e-05, |
|
"loss": 1.6597, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1729060609690824e-05, |
|
"loss": 1.6309, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.171235940094245e-05, |
|
"loss": 1.647, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1695653270170102e-05, |
|
"loss": 0.7725, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1678942265394055e-05, |
|
"loss": 1.5474, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1662226434648595e-05, |
|
"loss": 1.8389, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1645505825981884e-05, |
|
"loss": 1.2207, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.162878048745582e-05, |
|
"loss": 1.5566, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.161205046714589e-05, |
|
"loss": 1.6851, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1595315813141041e-05, |
|
"loss": 1.4443, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1578576573543541e-05, |
|
"loss": 1.1045, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1561832796468837e-05, |
|
"loss": 1.1906, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1545084530045419e-05, |
|
"loss": 1.4688, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1528331822414681e-05, |
|
"loss": 1.8008, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1511574721730781e-05, |
|
"loss": 1.4004, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1494813276160511e-05, |
|
"loss": 1.7275, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1478047533883143e-05, |
|
"loss": 1.1919, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1461277543090308e-05, |
|
"loss": 0.9951, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.144450335198584e-05, |
|
"loss": 1.3525, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1427725008785655e-05, |
|
"loss": 1.2007, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1410942561717601e-05, |
|
"loss": 1.1406, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.139415605902132e-05, |
|
"loss": 1.5728, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1377365548948114e-05, |
|
"loss": 1.5947, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1360571079760798e-05, |
|
"loss": 1.1715, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1343772699733579e-05, |
|
"loss": 1.4805, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.132697045715189e-05, |
|
"loss": 1.8428, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1310164400312284e-05, |
|
"loss": 1.4932, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1293354577522264e-05, |
|
"loss": 1.2031, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.127654103710016e-05, |
|
"loss": 1.1465, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1259723827374996e-05, |
|
"loss": 1.5317, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1242902996686333e-05, |
|
"loss": 1.4526, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.122607859338415e-05, |
|
"loss": 1.1372, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1209250665828684e-05, |
|
"loss": 1.3848, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.119241926239031e-05, |
|
"loss": 1.752, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1175584431449393e-05, |
|
"loss": 1.4453, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1158746221396148e-05, |
|
"loss": 1.4961, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1141904680630504e-05, |
|
"loss": 1.5713, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1125059857561966e-05, |
|
"loss": 1.1006, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.110821180060947e-05, |
|
"loss": 1.2229, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1091360558201249e-05, |
|
"loss": 1.083, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1074506178774696e-05, |
|
"loss": 1.6172, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1057648710776212e-05, |
|
"loss": 1.7012, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1040788202661084e-05, |
|
"loss": 1.3699, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1023924702893334e-05, |
|
"loss": 1.0747, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1007058259945584e-05, |
|
"loss": 1.5249, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.0990188922298918e-05, |
|
"loss": 1.1157, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0973316738442738e-05, |
|
"loss": 1.4419, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0956441756874628e-05, |
|
"loss": 1.4354, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0939564026100213e-05, |
|
"loss": 1.4668, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.092268359463302e-05, |
|
"loss": 1.0879, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0905800510994341e-05, |
|
"loss": 1.0923, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0888914823713092e-05, |
|
"loss": 1.4277, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0872026581325667e-05, |
|
"loss": 1.6455, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0855135832375809e-05, |
|
"loss": 1.2954, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.083824262541447e-05, |
|
"loss": 1.4199, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0821347008999658e-05, |
|
"loss": 1.1072, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0804449031696315e-05, |
|
"loss": 1.1694, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0787548742076164e-05, |
|
"loss": 1.4644, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0770646188717578e-05, |
|
"loss": 1.4917, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0753741420205431e-05, |
|
"loss": 1.3276, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0736834485130972e-05, |
|
"loss": 1.1748, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0719925432091671e-05, |
|
"loss": 1.8076, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0703014309691096e-05, |
|
"loss": 0.9493, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.068610116653875e-05, |
|
"loss": 1.5107, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0669186051249948e-05, |
|
"loss": 1.4468, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0652269012445684e-05, |
|
"loss": 1.4751, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0635350098752468e-05, |
|
"loss": 1.1997, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0618429358802209e-05, |
|
"loss": 1.4517, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0601506841232057e-05, |
|
"loss": 1.4814, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0584582594684274e-05, |
|
"loss": 1.3149, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0567656667806097e-05, |
|
"loss": 1.7627, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.055072910924959e-05, |
|
"loss": 1.6982, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0533799967671503e-05, |
|
"loss": 1.0278, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0516869291733142e-05, |
|
"loss": 1.5518, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0499937130100222e-05, |
|
"loss": 1.4204, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0483003531442726e-05, |
|
"loss": 1.6123, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0466068544434766e-05, |
|
"loss": 1.3213, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0449132217754455e-05, |
|
"loss": 1.5708, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.043219460008374e-05, |
|
"loss": 1.4751, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0415255740108299e-05, |
|
"loss": 1.3486, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0398315686517362e-05, |
|
"loss": 1.3721, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0381374488003603e-05, |
|
"loss": 1.1133, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.036443219326298e-05, |
|
"loss": 1.0, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0347488850994608e-05, |
|
"loss": 1.1328, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0330544509900603e-05, |
|
"loss": 1.6392, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0313599218685961e-05, |
|
"loss": 1.0771, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.029665302605841e-05, |
|
"loss": 1.5693, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0279705980728258e-05, |
|
"loss": 1.1655, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.026275813140828e-05, |
|
"loss": 0.947, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0245809526813547e-05, |
|
"loss": 1.4395, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0228860215661309e-05, |
|
"loss": 1.2583, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0211910246670848e-05, |
|
"loss": 0.8533, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0194959668563332e-05, |
|
"loss": 1.286, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.017800853006168e-05, |
|
"loss": 1.1396, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0161056879890427e-05, |
|
"loss": 1.0344, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0144104766775574e-05, |
|
"loss": 1.9268, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0127152239444453e-05, |
|
"loss": 1.5415, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0110199346625593e-05, |
|
"loss": 1.1924, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0093246137048563e-05, |
|
"loss": 1.6943, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0076292659443852e-05, |
|
"loss": 1.272, |
|
"step": 980 |
|
},
{ "epoch": 0.51, "learning_rate": 1.0059338962542713e-05, "loss": 1.5254, "step": 981 },
{ "epoch": 0.51, "learning_rate": 1.0042385095077032e-05, "loss": 1.6367, "step": 982 },
{ "epoch": 0.51, "learning_rate": 1.0025431105779184e-05, "loss": 1.6484, "step": 983 },
{ "epoch": 0.51, "learning_rate": 1.0008477043381896e-05, "loss": 1.5859, "step": 984 },
{ "epoch": 0.52, "learning_rate": 9.991522956618107e-06, "loss": 1.168, "step": 985 },
{ "epoch": 0.52, "learning_rate": 9.974568894220821e-06, "loss": 1.6069, "step": 986 },
{ "epoch": 0.52, "learning_rate": 9.957614904922971e-06, "loss": 1.5088, "step": 987 },
{ "epoch": 0.52, "learning_rate": 9.940661037457294e-06, "loss": 1.1694, "step": 988 },
{ "epoch": 0.52, "learning_rate": 9.923707340556151e-06, "loss": 1.5801, "step": 989 },
{ "epoch": 0.52, "learning_rate": 9.906753862951437e-06, "loss": 1.7764, "step": 990 },
{ "epoch": 0.52, "learning_rate": 9.88980065337441e-06, "loss": 1.625, "step": 991 },
{ "epoch": 0.52, "learning_rate": 9.872847760555547e-06, "loss": 1.1943, "step": 992 },
{ "epoch": 0.52, "learning_rate": 9.855895233224431e-06, "loss": 0.9604, "step": 993 },
{ "epoch": 0.52, "learning_rate": 9.838943120109576e-06, "loss": 1.5815, "step": 994 },
{ "epoch": 0.52, "learning_rate": 9.821991469938325e-06, "loss": 1.4517, "step": 995 },
{ "epoch": 0.52, "learning_rate": 9.805040331436672e-06, "loss": 1.1016, "step": 996 },
{ "epoch": 0.52, "learning_rate": 9.788089753329157e-06, "loss": 1.623, "step": 997 },
{ "epoch": 0.52, "learning_rate": 9.771139784338693e-06, "loss": 1.3481, "step": 998 },
{ "epoch": 0.52, "learning_rate": 9.754190473186455e-06, "loss": 1.4707, "step": 999 },
{ "epoch": 0.52, "learning_rate": 9.737241868591724e-06, "loss": 1.5947, "step": 1000 },
{ "epoch": 0.52, "learning_rate": 9.720294019271744e-06, "loss": 1.7764, "step": 1001 },
{ "epoch": 0.52, "learning_rate": 9.703346973941595e-06, "loss": 1.7031, "step": 1002 },
{ "epoch": 0.52, "learning_rate": 9.686400781314042e-06, "loss": 1.3486, "step": 1003 },
{ "epoch": 0.53, "learning_rate": 9.669455490099402e-06, "loss": 1.2261, "step": 1004 },
{ "epoch": 0.53, "learning_rate": 9.652511149005396e-06, "loss": 1.2632, "step": 1005 },
{ "epoch": 0.53, "learning_rate": 9.635567806737021e-06, "loss": 1.7065, "step": 1006 },
{ "epoch": 0.53, "learning_rate": 9.618625511996398e-06, "loss": 1.0562, "step": 1007 },
{ "epoch": 0.53, "learning_rate": 9.601684313482638e-06, "loss": 1.4648, "step": 1008 },
{ "epoch": 0.53, "learning_rate": 9.584744259891706e-06, "loss": 1.1753, "step": 1009 },
{ "epoch": 0.53, "learning_rate": 9.56780539991626e-06, "loss": 1.4387, "step": 1010 },
{ "epoch": 0.53, "learning_rate": 9.55086778224555e-06, "loss": 1.6699, "step": 1011 },
{ "epoch": 0.53, "learning_rate": 9.533931455565236e-06, "loss": 1.543, "step": 1012 },
{ "epoch": 0.53, "learning_rate": 9.51699646855728e-06, "loss": 1.4912, "step": 1013 },
{ "epoch": 0.53, "learning_rate": 9.50006286989978e-06, "loss": 1.0615, "step": 1014 },
{ "epoch": 0.53, "learning_rate": 9.483130708266856e-06, "loss": 1.0679, "step": 1015 },
{ "epoch": 0.53, "learning_rate": 9.466200032328499e-06, "loss": 1.5146, "step": 1016 },
{ "epoch": 0.53, "learning_rate": 9.44927089075041e-06, "loss": 1.1787, "step": 1017 },
{ "epoch": 0.53, "learning_rate": 9.432343332193907e-06, "loss": 1.4238, "step": 1018 },
{ "epoch": 0.53, "learning_rate": 9.415417405315727e-06, "loss": 1.5024, "step": 1019 },
{ "epoch": 0.53, "learning_rate": 9.39849315876795e-06, "loss": 1.2, "step": 1020 },
{ "epoch": 0.53, "learning_rate": 9.381570641197795e-06, "loss": 1.2314, "step": 1021 },
{ "epoch": 0.53, "learning_rate": 9.364649901247534e-06, "loss": 1.3638, "step": 1022 },
{ "epoch": 0.54, "learning_rate": 9.34773098755432e-06, "loss": 1.6426, "step": 1023 },
{ "epoch": 0.54, "learning_rate": 9.330813948750053e-06, "loss": 1.5254, "step": 1024 },
{ "epoch": 0.54, "learning_rate": 9.313898833461255e-06, "loss": 1.2178, "step": 1025 },
{ "epoch": 0.54, "learning_rate": 9.29698569030891e-06, "loss": 1.3511, "step": 1026 },
{ "epoch": 0.54, "learning_rate": 9.28007456790833e-06, "loss": 1.0898, "step": 1027 },
{ "epoch": 0.54, "learning_rate": 9.26316551486903e-06, "loss": 1.4395, "step": 1028 },
{ "epoch": 0.54, "learning_rate": 9.246258579794574e-06, "loss": 1.6514, "step": 1029 },
{ "epoch": 0.54, "learning_rate": 9.229353811282427e-06, "loss": 1.1983, "step": 1030 },
{ "epoch": 0.54, "learning_rate": 9.212451257923841e-06, "loss": 1.4531, "step": 1031 },
{ "epoch": 0.54, "learning_rate": 9.195550968303688e-06, "loss": 1.5723, "step": 1032 },
{ "epoch": 0.54, "learning_rate": 9.178652991000342e-06, "loss": 1.4722, "step": 1033 },
{ "epoch": 0.54, "learning_rate": 9.161757374585533e-06, "loss": 1.499, "step": 1034 },
{ "epoch": 0.54, "learning_rate": 9.144864167624191e-06, "loss": 1.1548, "step": 1035 },
{ "epoch": 0.54, "learning_rate": 9.127973418674338e-06, "loss": 1.7676, "step": 1036 },
{ "epoch": 0.54, "learning_rate": 9.111085176286912e-06, "loss": 1.6885, "step": 1037 },
{ "epoch": 0.54, "learning_rate": 9.094199489005664e-06, "loss": 1.1021, "step": 1038 },
{ "epoch": 0.54, "learning_rate": 9.07731640536698e-06, "loss": 1.1333, "step": 1039 },
{ "epoch": 0.54, "learning_rate": 9.06043597389979e-06, "loss": 1.6533, "step": 1040 },
{ "epoch": 0.54, "learning_rate": 9.043558243125374e-06, "loss": 1.2778, "step": 1041 },
{ "epoch": 0.55, "learning_rate": 9.026683261557263e-06, "loss": 1.1177, "step": 1042 },
{ "epoch": 0.55, "learning_rate": 9.009811077701083e-06, "loss": 1.6318, "step": 1043 },
{ "epoch": 0.55, "learning_rate": 8.992941740054418e-06, "loss": 1.4219, "step": 1044 },
{ "epoch": 0.55, "learning_rate": 8.97607529710667e-06, "loss": 1.6226, "step": 1045 },
{ "epoch": 0.55, "learning_rate": 8.959211797338919e-06, "loss": 1.4028, "step": 1046 },
{ "epoch": 0.55, "learning_rate": 8.942351289223795e-06, "loss": 0.9825, "step": 1047 },
{ "epoch": 0.55, "learning_rate": 8.925493821225309e-06, "loss": 1.1401, "step": 1048 },
{ "epoch": 0.55, "learning_rate": 8.908639441798751e-06, "loss": 1.3823, "step": 1049 },
{ "epoch": 0.55, "learning_rate": 8.891788199390533e-06, "loss": 1.7295, "step": 1050 },
{ "epoch": 0.55, "learning_rate": 8.874940142438036e-06, "loss": 1.4253, "step": 1051 },
{ "epoch": 0.55, "learning_rate": 8.858095319369499e-06, "loss": 1.2427, "step": 1052 },
{ "epoch": 0.55, "learning_rate": 8.841253778603855e-06, "loss": 1.5645, "step": 1053 },
{ "epoch": 0.55, "learning_rate": 8.824415568550612e-06, "loss": 1.5098, "step": 1054 },
{ "epoch": 0.55, "learning_rate": 8.807580737609693e-06, "loss": 1.5322, "step": 1055 },
{ "epoch": 0.55, "learning_rate": 8.790749334171323e-06, "loss": 1.5254, "step": 1056 },
{ "epoch": 0.55, "learning_rate": 8.773921406615854e-06, "loss": 1.0981, "step": 1057 },
{ "epoch": 0.55, "learning_rate": 8.757097003313665e-06, "loss": 1.042, "step": 1058 },
{ "epoch": 0.55, "learning_rate": 8.740276172625007e-06, "loss": 1.1118, "step": 1059 },
{ "epoch": 0.55, "learning_rate": 8.72345896289984e-06, "loss": 1.0481, "step": 1060 },
{ "epoch": 0.56, "learning_rate": 8.706645422477739e-06, "loss": 0.8436, "step": 1061 },
{ "epoch": 0.56, "learning_rate": 8.689835599687717e-06, "loss": 1.4683, "step": 1062 },
{ "epoch": 0.56, "learning_rate": 8.67302954284811e-06, "loss": 1.7275, "step": 1063 },
{ "epoch": 0.56, "learning_rate": 8.656227300266425e-06, "loss": 1.7979, "step": 1064 },
{ "epoch": 0.56, "learning_rate": 8.639428920239205e-06, "loss": 1.5439, "step": 1065 },
{ "epoch": 0.56, "learning_rate": 8.62263445105189e-06, "loss": 1.6797, "step": 1066 },
{ "epoch": 0.56, "learning_rate": 8.60584394097868e-06, "loss": 1.3809, "step": 1067 },
{ "epoch": 0.56, "learning_rate": 8.5890574382824e-06, "loss": 1.6143, "step": 1068 },
{ "epoch": 0.56, "learning_rate": 8.572274991214345e-06, "loss": 1.4644, "step": 1069 },
{ "epoch": 0.56, "learning_rate": 8.555496648014163e-06, "loss": 1.3818, "step": 1070 },
{ "epoch": 0.56, "learning_rate": 8.538722456909694e-06, "loss": 1.4023, "step": 1071 },
{ "epoch": 0.56, "learning_rate": 8.521952466116862e-06, "loss": 1.3374, "step": 1072 },
{ "epoch": 0.56, "learning_rate": 8.505186723839492e-06, "loss": 1.1001, "step": 1073 },
{ "epoch": 0.56, "learning_rate": 8.488425278269224e-06, "loss": 1.0283, "step": 1074 },
{ "epoch": 0.56, "learning_rate": 8.471668177585324e-06, "loss": 1.5352, "step": 1075 },
{ "epoch": 0.56, "learning_rate": 8.454915469954583e-06, "loss": 1.1626, "step": 1076 },
{ "epoch": 0.56, "learning_rate": 8.438167203531166e-06, "loss": 1.457, "step": 1077 },
{ "epoch": 0.56, "learning_rate": 8.42142342645646e-06, "loss": 1.7764, "step": 1078 },
{ "epoch": 0.56, "learning_rate": 8.404684186858962e-06, "loss": 1.0559, "step": 1079 },
{ "epoch": 0.57, "learning_rate": 8.387949532854113e-06, "loss": 0.8921, "step": 1080 },
{ "epoch": 0.57, "learning_rate": 8.371219512544182e-06, "loss": 1.4619, "step": 1081 },
{ "epoch": 0.57, "learning_rate": 8.354494174018118e-06, "loss": 1.2764, "step": 1082 },
{ "epoch": 0.57, "learning_rate": 8.337773565351407e-06, "loss": 1.6602, "step": 1083 },
{ "epoch": 0.57, "learning_rate": 8.321057734605949e-06, "loss": 1.4209, "step": 1084 },
{ "epoch": 0.57, "learning_rate": 8.3043467298299e-06, "loss": 0.9824, "step": 1085 },
{ "epoch": 0.57, "learning_rate": 8.287640599057551e-06, "loss": 0.999, "step": 1086 },
{ "epoch": 0.57, "learning_rate": 8.270939390309181e-06, "loss": 1.7012, "step": 1087 },
{ "epoch": 0.57, "learning_rate": 8.254243151590922e-06, "loss": 1.4688, "step": 1088 },
{ "epoch": 0.57, "learning_rate": 8.237551930894616e-06, "loss": 1.4844, "step": 1089 },
{ "epoch": 0.57, "learning_rate": 8.22086577619769e-06, "loss": 1.5173, "step": 1090 },
{ "epoch": 0.57, "learning_rate": 8.204184735462994e-06, "loss": 1.6831, "step": 1091 },
{ "epoch": 0.57, "learning_rate": 8.187508856638697e-06, "loss": 1.0901, "step": 1092 },
{ "epoch": 0.57, "learning_rate": 8.17083818765812e-06, "loss": 1.0029, "step": 1093 },
{ "epoch": 0.57, "learning_rate": 8.154172776439608e-06, "loss": 1.1416, "step": 1094 },
{ "epoch": 0.57, "learning_rate": 8.137512670886397e-06, "loss": 1.5396, "step": 1095 },
{ "epoch": 0.57, "learning_rate": 8.120857918886467e-06, "loss": 1.2782, "step": 1096 },
{ "epoch": 0.57, "learning_rate": 8.10420856831242e-06, "loss": 1.3452, "step": 1097 },
{ "epoch": 0.57, "learning_rate": 8.087564667021317e-06, "loss": 1.4873, "step": 1098 },
{ "epoch": 0.58, "learning_rate": 8.070926262854573e-06, "loss": 1.1948, "step": 1099 },
{ "epoch": 0.58, "learning_rate": 8.054293403637787e-06, "loss": 1.7705, "step": 1100 },
{ "epoch": 0.58, "learning_rate": 8.037666137180624e-06, "loss": 1.5262, "step": 1101 },
{ "epoch": 0.58, "learning_rate": 8.021044511276682e-06, "loss": 1.5522, "step": 1102 },
{ "epoch": 0.58, "learning_rate": 8.00442857370333e-06, "loss": 1.624, "step": 1103 },
{ "epoch": 0.58, "learning_rate": 7.987818372221603e-06, "loss": 1.4229, "step": 1104 },
{ "epoch": 0.58, "learning_rate": 7.971213954576031e-06, "loss": 0.9995, "step": 1105 },
{ "epoch": 0.58, "learning_rate": 7.954615368494538e-06, "loss": 1.0386, "step": 1106 },
{ "epoch": 0.58, "learning_rate": 7.938022661688265e-06, "loss": 1.2856, "step": 1107 },
{ "epoch": 0.58, "learning_rate": 7.921435881851474e-06, "loss": 1.4165, "step": 1108 },
{ "epoch": 0.58, "learning_rate": 7.90485507666137e-06, "loss": 1.238, "step": 1109 },
{ "epoch": 0.58, "learning_rate": 7.888280293778001e-06, "loss": 1.2002, "step": 1110 },
{ "epoch": 0.58, "learning_rate": 7.871711580844102e-06, "loss": 1.2422, "step": 1111 },
{ "epoch": 0.58, "learning_rate": 7.855148985484946e-06, "loss": 1.0547, "step": 1112 },
{ "epoch": 0.58, "learning_rate": 7.838592555308244e-06, "loss": 1.6445, "step": 1113 },
{ "epoch": 0.58, "learning_rate": 7.822042337903964e-06, "loss": 1.5674, "step": 1114 },
{ "epoch": 0.58, "learning_rate": 7.805498380844236e-06, "loss": 1.5283, "step": 1115 },
{ "epoch": 0.58, "learning_rate": 7.788960731683174e-06, "loss": 1.6382, "step": 1116 },
{ "epoch": 0.58, "learning_rate": 7.772429437956777e-06, "loss": 1.3384, "step": 1117 },
{ "epoch": 0.59, "learning_rate": 7.755904547182778e-06, "loss": 1.3628, "step": 1118 },
{ "epoch": 0.59, "learning_rate": 7.739386106860485e-06, "loss": 1.1421, "step": 1119 },
{ "epoch": 0.59, "learning_rate": 7.722874164470685e-06, "loss": 1.4028, "step": 1120 },
{ "epoch": 0.59, "learning_rate": 7.70636876747548e-06, "loss": 1.6289, "step": 1121 },
{ "epoch": 0.59, "learning_rate": 7.689869963318154e-06, "loss": 1.2715, "step": 1122 },
{ "epoch": 0.59, "learning_rate": 7.673377799423047e-06, "loss": 1.2466, "step": 1123 },
{ "epoch": 0.59, "learning_rate": 7.656892323195414e-06, "loss": 1.0854, "step": 1124 },
{ "epoch": 0.59, "learning_rate": 7.64041358202127e-06, "loss": 1.6826, "step": 1125 },
{ "epoch": 0.59, "learning_rate": 7.6239416232672904e-06, "loss": 1.2422, "step": 1126 },
{ "epoch": 0.59, "learning_rate": 7.607476494280653e-06, "loss": 1.6172, "step": 1127 },
{ "epoch": 0.59, "learning_rate": 7.591018242388888e-06, "loss": 1.2598, "step": 1128 },
{ "epoch": 0.59, "learning_rate": 7.574566914899779e-06, "loss": 1.415, "step": 1129 },
{ "epoch": 0.59, "learning_rate": 7.5581225591011864e-06, "loss": 1.3315, "step": 1130 },
{ "epoch": 0.59, "learning_rate": 7.541685222260948e-06, "loss": 1.8037, "step": 1131 },
{ "epoch": 0.59, "learning_rate": 7.525254951626712e-06, "loss": 1.2563, "step": 1132 },
{ "epoch": 0.59, "learning_rate": 7.5088317944258305e-06, "loss": 1.4981, "step": 1133 },
{ "epoch": 0.59, "learning_rate": 7.492415797865191e-06, "loss": 1.3926, "step": 1134 },
{ "epoch": 0.59, "learning_rate": 7.476007009131112e-06, "loss": 1.751, "step": 1135 },
{ "epoch": 0.59, "learning_rate": 7.459605475389193e-06, "loss": 1.001, "step": 1136 },
{ "epoch": 0.59, "learning_rate": 7.44321124378417e-06, "loss": 1.4009, "step": 1137 },
{ "epoch": 0.6, "learning_rate": 7.426824361439803e-06, "loss": 1.354, "step": 1138 },
{ "epoch": 0.6, "learning_rate": 7.41044487545871e-06, "loss": 1.2329, "step": 1139 },
{ "epoch": 0.6, "learning_rate": 7.394072832922268e-06, "loss": 1.3438, "step": 1140 },
{ "epoch": 0.6, "learning_rate": 7.3777082808904435e-06, "loss": 1.0591, "step": 1141 },
{ "epoch": 0.6, "learning_rate": 7.361351266401682e-06, "loss": 1.4851, "step": 1142 },
{ "epoch": 0.6, "learning_rate": 7.345001836472754e-06, "loss": 1.4868, "step": 1143 },
{ "epoch": 0.6, "learning_rate": 7.328660038098636e-06, "loss": 1.4688, "step": 1144 },
{ "epoch": 0.6, "learning_rate": 7.312325918252373e-06, "loss": 1.3389, "step": 1145 },
{ "epoch": 0.6, "learning_rate": 7.295999523884921e-06, "loss": 1.5938, "step": 1146 },
{ "epoch": 0.6, "learning_rate": 7.279680901925051e-06, "loss": 0.9379, "step": 1147 },
{ "epoch": 0.6, "learning_rate": 7.263370099279173e-06, "loss": 1.4214, "step": 1148 },
{ "epoch": 0.6, "learning_rate": 7.247067162831237e-06, "loss": 1.0825, "step": 1149 },
{ "epoch": 0.6, "learning_rate": 7.230772139442572e-06, "loss": 1.124, "step": 1150 },
{ "epoch": 0.6, "learning_rate": 7.214485075951771e-06, "loss": 1.666, "step": 1151 },
{ "epoch": 0.6, "learning_rate": 7.198206019174534e-06, "loss": 0.8665, "step": 1152 },
{ "epoch": 0.6, "learning_rate": 7.181935015903558e-06, "loss": 1.3301, "step": 1153 },
{ "epoch": 0.6, "learning_rate": 7.165672112908393e-06, "loss": 1.5352, "step": 1154 },
{ "epoch": 0.6, "learning_rate": 7.149417356935289e-06, "loss": 1.2217, "step": 1155 },
{ "epoch": 0.6, "learning_rate": 7.133170794707096e-06, "loss": 1.0522, "step": 1156 },
{ "epoch": 0.61, "learning_rate": 7.116932472923098e-06, "loss": 1.0388, "step": 1157 },
{ "epoch": 0.61, "learning_rate": 7.100702438258905e-06, "loss": 1.3281, "step": 1158 },
{ "epoch": 0.61, "learning_rate": 7.084480737366297e-06, "loss": 1.4253, "step": 1159 },
{ "epoch": 0.61, "learning_rate": 7.068267416873102e-06, "loss": 1.5098, "step": 1160 },
{ "epoch": 0.61, "learning_rate": 7.052062523383065e-06, "loss": 1.2988, "step": 1161 },
{ "epoch": 0.61, "learning_rate": 7.035866103475698e-06, "loss": 1.4795, "step": 1162 },
{ "epoch": 0.61, "learning_rate": 7.019678203706164e-06, "loss": 1.707, "step": 1163 },
{ "epoch": 0.61, "learning_rate": 7.0034988706051325e-06, "loss": 1.6602, "step": 1164 },
{ "epoch": 0.61, "learning_rate": 6.987328150678652e-06, "loss": 1.4771, "step": 1165 },
{ "epoch": 0.61, "learning_rate": 6.9711660904080055e-06, "loss": 1.0928, "step": 1166 },
{ "epoch": 0.61, "learning_rate": 6.955012736249598e-06, "loss": 1.4332, "step": 1167 },
{ "epoch": 0.61, "learning_rate": 6.938868134634793e-06, "loss": 1.2715, "step": 1168 },
{ "epoch": 0.61, "learning_rate": 6.922732331969808e-06, "loss": 1.0107, "step": 1169 },
{ "epoch": 0.61, "learning_rate": 6.90660537463557e-06, "loss": 1.5151, "step": 1170 },
{ "epoch": 0.61, "learning_rate": 6.890487308987567e-06, "loss": 0.9897, "step": 1171 },
{ "epoch": 0.61, "learning_rate": 6.8743781813557455e-06, "loss": 1.1364, "step": 1172 },
{ "epoch": 0.61, "learning_rate": 6.858278038044344e-06, "loss": 1.6895, "step": 1173 },
{ "epoch": 0.61, "learning_rate": 6.842186925331793e-06, "loss": 1.6377, "step": 1174 },
{ "epoch": 0.61, "learning_rate": 6.8261048894705505e-06, "loss": 1.1216, "step": 1175 },
{ "epoch": 0.62, "learning_rate": 6.810031976686994e-06, "loss": 1.2217, "step": 1176 },
{ "epoch": 0.62, "learning_rate": 6.793968233181274e-06, "loss": 1.6836, "step": 1177 },
{ "epoch": 0.62, "learning_rate": 6.777913705127181e-06, "loss": 1.7158, "step": 1178 },
{ "epoch": 0.62, "learning_rate": 6.761868438672028e-06, "loss": 1.5767, "step": 1179 },
{ "epoch": 0.62, "learning_rate": 6.745832479936492e-06, "loss": 1.1069, "step": 1180 },
{ "epoch": 0.62, "learning_rate": 6.729805875014506e-06, "loss": 1.3071, "step": 1181 },
{ "epoch": 0.62, "learning_rate": 6.713788669973108e-06, "loss": 1.6152, "step": 1182 },
{ "epoch": 0.62, "learning_rate": 6.6977809108523295e-06, "loss": 1.2695, "step": 1183 },
{ "epoch": 0.62, "learning_rate": 6.6817826436650335e-06, "loss": 1.1567, "step": 1184 },
{ "epoch": 0.62, "learning_rate": 6.665793914396818e-06, "loss": 1.0889, "step": 1185 },
{ "epoch": 0.62, "learning_rate": 6.649814769005847e-06, "loss": 1.4072, "step": 1186 },
{ "epoch": 0.62, "learning_rate": 6.633845253422748e-06, "loss": 1.4688, "step": 1187 },
{ "epoch": 0.62, "learning_rate": 6.617885413550471e-06, "loss": 1.5273, "step": 1188 },
{ "epoch": 0.62, "learning_rate": 6.601935295264139e-06, "loss": 1.3323, "step": 1189 },
{ "epoch": 0.62, "learning_rate": 6.5859949444109495e-06, "loss": 1.4219, "step": 1190 },
{ "epoch": 0.62, "learning_rate": 6.570064406810008e-06, "loss": 1.3032, "step": 1191 },
{ "epoch": 0.62, "learning_rate": 6.554143728252229e-06, "loss": 1.0376, "step": 1192 },
{ "epoch": 0.62, "learning_rate": 6.53823295450017e-06, "loss": 1.6396, "step": 1193 },
{ "epoch": 0.62, "learning_rate": 6.522332131287931e-06, "loss": 1.4668, "step": 1194 },
{ "epoch": 0.63, "learning_rate": 6.506441304321012e-06, "loss": 0.9971, "step": 1195 },
{ "epoch": 0.63, "learning_rate": 6.4905605192761656e-06, "loss": 1.0859, "step": 1196 },
{ "epoch": 0.63, "learning_rate": 6.474689821801295e-06, "loss": 1.5293, "step": 1197 },
{ "epoch": 0.63, "learning_rate": 6.4588292575152955e-06, "loss": 1.3582, "step": 1198 },
{ "epoch": 0.63, "learning_rate": 6.442978872007946e-06, "loss": 0.8848, "step": 1199 },
{ "epoch": 0.63, "learning_rate": 6.42713871083976e-06, "loss": 1.6592, "step": 1200 },
{ "epoch": 0.63, "learning_rate": 6.411308819541863e-06, "loss": 0.9512, "step": 1201 },
{ "epoch": 0.63, "learning_rate": 6.395489243615861e-06, "loss": 1.3691, "step": 1202 },
{ "epoch": 0.63, "learning_rate": 6.379680028533714e-06, "loss": 1.6475, "step": 1203 },
{ "epoch": 0.63, "learning_rate": 6.363881219737599e-06, "loss": 1.1069, "step": 1204 },
{ "epoch": 0.63, "learning_rate": 6.348092862639772e-06, "loss": 1.0828, "step": 1205 },
{ "epoch": 0.63, "learning_rate": 6.332315002622461e-06, "loss": 1.2798, "step": 1206 },
{ "epoch": 0.63, "learning_rate": 6.316547685037707e-06, "loss": 1.0975, "step": 1207 },
{ "epoch": 0.63, "learning_rate": 6.300790955207262e-06, "loss": 1.3804, "step": 1208 },
{ "epoch": 0.63, "learning_rate": 6.285044858422429e-06, "loss": 1.459, "step": 1209 },
{ "epoch": 0.63, "learning_rate": 6.269309439943963e-06, "loss": 0.998, "step": 1210 },
{ "epoch": 0.63, "learning_rate": 6.253584745001908e-06, "loss": 1.1904, "step": 1211 },
{ "epoch": 0.63, "learning_rate": 6.237870818795499e-06, "loss": 1.3691, "step": 1212 },
{ "epoch": 0.63, "learning_rate": 6.222167706493016e-06, "loss": 1.2441, "step": 1213 },
{ "epoch": 0.64, "learning_rate": 6.206475453231644e-06, "loss": 1.6738, "step": 1214 },
{ "epoch": 0.64, "learning_rate": 6.190794104117368e-06, "loss": 1.1246, "step": 1215 },
{ "epoch": 0.64, "learning_rate": 6.175123704224814e-06, "loss": 1.1133, "step": 1216 },
{ "epoch": 0.64, "learning_rate": 6.1594642985971565e-06, "loss": 1.4961, "step": 1217 },
{ "epoch": 0.64, "learning_rate": 6.143815932245951e-06, "loss": 1.0352, "step": 1218 },
{ "epoch": 0.64, "learning_rate": 6.128178650151026e-06, "loss": 1.4941, "step": 1219 },
{ "epoch": 0.64, "learning_rate": 6.112552497260353e-06, "loss": 1.438, "step": 1220 },
{ "epoch": 0.64, "learning_rate": 6.096937518489912e-06, "loss": 1.0801, "step": 1221 },
{ "epoch": 0.64, "learning_rate": 6.081333758723562e-06, "loss": 1.0972, "step": 1222 },
{ "epoch": 0.64, "learning_rate": 6.065741262812914e-06, "loss": 1.6309, "step": 1223 },
{ "epoch": 0.64, "learning_rate": 6.0501600755772074e-06, "loss": 1.4404, "step": 1224 },
{ "epoch": 0.64, "learning_rate": 6.0345902418031645e-06, "loss": 1.4258, "step": 1225 },
{ "epoch": 0.64, "learning_rate": 6.019031806244888e-06, "loss": 1.5029, "step": 1226 },
{ "epoch": 0.64, "learning_rate": 6.0034848136237035e-06, "loss": 1.5479, "step": 1227 },
{ "epoch": 0.64, "learning_rate": 5.987949308628054e-06, "loss": 1.6562, "step": 1228 },
{ "epoch": 0.64, "learning_rate": 5.9724253359133614e-06, "loss": 0.9883, "step": 1229 },
{ "epoch": 0.64, "learning_rate": 5.956912940101891e-06, "loss": 1.1201, "step": 1230 },
{ "epoch": 0.64, "learning_rate": 5.941412165782645e-06, "loss": 1.021, "step": 1231 },
{ "epoch": 0.64, "learning_rate": 5.925923057511207e-06, "loss": 1.6016, "step": 1232 },
{ "epoch": 0.65, "learning_rate": 5.910445659809638e-06, "loss": 1.4399, "step": 1233 },
{ "epoch": 0.65, "learning_rate": 5.894980017166329e-06, "loss": 0.9529, "step": 1234 },
{ "epoch": 0.65, "learning_rate": 5.87952617403589e-06, "loss": 1.542, "step": 1235 },
{ "epoch": 0.65, "learning_rate": 5.864084174839013e-06, "loss": 1.6528, "step": 1236 },
{ "epoch": 0.65, "learning_rate": 5.848654063962338e-06, "loss": 0.9795, "step": 1237 },
{ "epoch": 0.65, "learning_rate": 5.833235885758346e-06, "loss": 1.3682, "step": 1238 },
{ "epoch": 0.65, "learning_rate": 5.817829684545204e-06, "loss": 1.4277, "step": 1239 },
{ "epoch": 0.65, "learning_rate": 5.80243550460667e-06, "loss": 1.1196, "step": 1240 },
{ "epoch": 0.65, "learning_rate": 5.787053390191925e-06, "loss": 1.3594, "step": 1241 },
{ "epoch": 0.65, "learning_rate": 5.771683385515495e-06, "loss": 0.9307, "step": 1242 },
{ "epoch": 0.65, "learning_rate": 5.75632553475707e-06, "loss": 1.4297, "step": 1243 },
{ "epoch": 0.65, "learning_rate": 5.740979882061433e-06, "loss": 1.3069, "step": 1244 },
{ "epoch": 0.65, "learning_rate": 5.7256464715382754e-06, "loss": 1.5448, "step": 1245 },
{ "epoch": 0.65, "learning_rate": 5.710325347262125e-06, "loss": 0.9534, "step": 1246 },
{ "epoch": 0.65, "learning_rate": 5.695016553272182e-06, "loss": 1.583, "step": 1247 },
{ "epoch": 0.65, "learning_rate": 5.6797201335722064e-06, "loss": 1.4493, "step": 1248 },
{ "epoch": 0.65, "learning_rate": 5.664436132130384e-06, "loss": 1.3965, "step": 1249 },
{ "epoch": 0.65, "learning_rate": 5.649164592879213e-06, "loss": 1.39, "step": 1250 },
{ "epoch": 0.65, "learning_rate": 5.6339055597153734e-06, "loss": 1.8262, "step": 1251 },
{ "epoch": 0.66, "learning_rate": 5.618659076499578e-06, "loss": 1.2109, "step": 1252 },
{ "epoch": 0.66, "learning_rate": 5.603425187056496e-06, "loss": 1.5771, "step": 1253 },
{ "epoch": 0.66, "learning_rate": 5.588203935174563e-06, "loss": 1.5557, "step": 1254 },
{ "epoch": 0.66, "learning_rate": 5.572995364605921e-06, "loss": 1.125, "step": 1255 },
{ "epoch": 0.66, "learning_rate": 5.55779951906624e-06, "loss": 1.2627, "step": 1256 },
{ "epoch": 0.66, "learning_rate": 5.542616442234618e-06, "loss": 1.6387, "step": 1257 },
{ "epoch": 0.66, "learning_rate": 5.527446177753455e-06, "loss": 1.5566, "step": 1258 },
{ "epoch": 0.66, "learning_rate": 5.512288769228318e-06, "loss": 1.0342, "step": 1259 },
{ "epoch": 0.66, "learning_rate": 5.49714426022782e-06, "loss": 1.5405, "step": 1260 },
{ "epoch": 0.66, "learning_rate": 5.482012694283503e-06, "loss": 1.2358, "step": 1261 },
{ "epoch": 0.66, "learning_rate": 5.466894114889697e-06, "loss": 1.6763, "step": 1262 },
{ "epoch": 0.66, "learning_rate": 5.451788565503406e-06, "loss": 1.2627, "step": 1263 },
{ "epoch": 0.66, "learning_rate": 5.43669608954418e-06, "loss": 1.4429, "step": 1264 },
{ "epoch": 0.66, "learning_rate": 5.421616730394e-06, "loss": 0.9888, "step": 1265 },
{ "epoch": 0.66, "learning_rate": 5.406550531397123e-06, "loss": 1.2554, "step": 1266 },
{ "epoch": 0.66, "learning_rate": 5.391497535859999e-06, "loss": 1.5898, "step": 1267 },
{ "epoch": 0.66, "learning_rate": 5.376457787051117e-06, "loss": 0.9497, "step": 1268 },
{ "epoch": 0.66, "learning_rate": 5.361431328200888e-06, "loss": 1.3229, "step": 1269 },
{ "epoch": 0.66, "learning_rate": 5.346418202501526e-06, "loss": 1.0796, "step": 1270 },
{ "epoch": 0.67, "learning_rate": 5.331418453106916e-06, "loss": 1.2529, "step": 1271 },
{ "epoch": 0.67, "learning_rate": 5.316432123132498e-06, "loss": 1.5146, "step": 1272 },
{ "epoch": 0.67, "learning_rate": 5.301459255655134e-06, "loss": 1.4546, "step": 1273 },
{ "epoch": 0.67, "learning_rate": 5.286499893713003e-06, "loss": 1.5205, "step": 1274 },
{ "epoch": 0.67, "learning_rate": 5.271554080305439e-06, "loss": 1.5762, "step": 1275 },
{ "epoch": 0.67, "learning_rate": 5.256621858392861e-06, "loss": 1.4277, "step": 1276 },
{ "epoch": 0.67, "learning_rate": 5.241703270896589e-06, "loss": 1.6465, "step": 1277 },
{ "epoch": 0.67, "learning_rate": 5.226798360698786e-06, "loss": 1.6914, "step": 1278 },
{ "epoch": 0.67, "learning_rate": 5.211907170642267e-06, "loss": 1.5449, "step": 1279 },
{ "epoch": 0.67, "learning_rate": 5.1970297435304395e-06, "loss": 1.3711, "step": 1280 },
{ "epoch": 0.67, "learning_rate": 5.18216612212713e-06, "loss": 1.4985, "step": 1281 },
{ "epoch": 0.67, "learning_rate": 5.167316349156495e-06, "loss": 0.9785, "step": 1282 },
{ "epoch": 0.67, "learning_rate": 5.1524804673028725e-06, "loss": 1.5967, "step": 1283 },
{ "epoch": 0.67, "learning_rate": 5.1376585192106824e-06, "loss": 1.4902, "step": 1284 },
{ "epoch": 0.67, "learning_rate": 5.1228505474842885e-06, "loss": 1.6733, "step": 1285 },
{ "epoch": 0.67, "learning_rate": 5.10805659468788e-06, "loss": 1.6499, "step": 1286 },
{ "epoch": 0.67, "learning_rate": 5.093276703345351e-06, "loss": 1.2339, "step": 1287 },
{ "epoch": 0.67, "learning_rate": 5.078510915940178e-06, "loss": 1.7031, "step": 1288 },
{ "epoch": 0.67, "learning_rate": 5.063759274915299e-06, "loss": 1.4893, "step": 1289 },
{ "epoch": 0.68, "learning_rate": 5.0490218226729855e-06, "loss": 1.0605, "step": 1290 },
{ "epoch": 0.68, "learning_rate": 5.034298601574726e-06, "loss": 1.5371, "step": 1291 },
{ "epoch": 0.68, "learning_rate": 5.019589653941103e-06, "loss": 0.8093, "step": 1292 },
{ "epoch": 0.68, "learning_rate": 5.0048950220516725e-06, "loss": 1.408, "step": 1293 },
{ "epoch": 0.68, "learning_rate": 4.990214748144837e-06, "loss": 1.0444, "step": 1294 },
{ "epoch": 0.68, "learning_rate": 4.975548874417733e-06, "loss": 1.3027, "step": 1295 },
{ "epoch": 0.68, "learning_rate": 4.9608974430261025e-06, "loss": 1.4287, "step": 1296 },
{ "epoch": 0.68, "learning_rate": 4.946260496084175e-06, "loss": 1.3066, "step": 1297 },
{ "epoch": 0.68, "learning_rate": 4.931638075664544e-06, "loss": 1.3317, "step": 1298 },
{ "epoch": 0.68, "learning_rate": 4.917030223798057e-06, "loss": 0.9253, "step": 1299 },
{ "epoch": 0.68, "learning_rate": 4.9024369824736654e-06, "loss": 1.3726, "step": 1300 },
{ "epoch": 0.68, "learning_rate": 4.88785839363835e-06, "loss": 1.1211, "step": 1301 },
{ "epoch": 0.68, "learning_rate": 4.873294499196946e-06, "loss": 1.4985, "step": 1302 },
{ "epoch": 0.68, "learning_rate": 4.85874534101208e-06, "loss": 0.9644, "step": 1303 },
{ "epoch": 0.68, "learning_rate": 4.844210960903998e-06, "loss": 1.3154, "step": 1304 },
{ "epoch": 0.68, "learning_rate": 4.829691400650478e-06, "loss": 1.4136, "step": 1305 },
{ "epoch": 0.68, "learning_rate": 4.8151867019866985e-06, "loss": 1.207, "step": 1306 },
{ "epoch": 0.68, "learning_rate": 4.800696906605117e-06, "loss": 1.0835, "step": 1307 },
{ "epoch": 0.68, "learning_rate": 4.786222056155356e-06, "loss": 1.6274, "step": 1308 },
{ "epoch": 0.68, "learning_rate": 4.771762192244075e-06, "loss": 1.5713, "step": 1309 },
{ "epoch": 0.69, "learning_rate": 4.757317356434869e-06, "loss": 0.8888, "step": 1310 },
{ "epoch": 0.69, "learning_rate": 4.742887590248114e-06, "loss": 1.4561, "step": 1311 },
{ "epoch": 0.69, "learning_rate": 4.7284729351608985e-06, "loss": 0.9648, "step": 1312 },
{ "epoch": 0.69, "learning_rate": 4.714073432606841e-06, "loss": 1.4858, "step": 1313 },
{ "epoch": 0.69, "learning_rate": 4.699689123976039e-06, "loss": 1.2949, "step": 1314 },
{ "epoch": 0.69, "learning_rate": 4.6853200506148955e-06, "loss": 0.8962, "step": 1315 },
{ "epoch": 0.69, "learning_rate": 4.670966253826027e-06, "loss": 1.6348, "step": 1316 },
{ "epoch": 0.69, "learning_rate": 4.65662777486814e-06, "loss": 1.0588, "step": 1317 },
{ "epoch": 0.69, "learning_rate": 4.6423046549559095e-06, "loss": 1.1338, "step": 1318 },
{ "epoch": 0.69, "learning_rate": 4.627996935259862e-06, "loss": 1.3716, "step": 1319 },
{ "epoch": 0.69, "learning_rate": 4.6137046569062595e-06, "loss": 1.4316, "step": 1320 },
{ "epoch": 0.69, "learning_rate": 4.599427860976978e-06, "loss": 1.6514, "step": 1321 },
{ "epoch": 0.69, "learning_rate": 4.585166588509391e-06, "loss": 0.8566, "step": 1322 },
{ "epoch": 0.69, "learning_rate": 4.570920880496247e-06, "loss": 1.5576, "step": 1323 },
{ "epoch": 0.69, "learning_rate": 4.556690777885571e-06, "loss": 1.6045, "step": 1324 },
{ "epoch": 0.69, "learning_rate": 4.542476321580505e-06, "loss": 1.4712, "step": 1325 },
{ "epoch": 0.69, "learning_rate": 4.528277552439244e-06, "loss": 1.3672, "step": 1326 },
{ "epoch": 0.69, "learning_rate": 4.514094511274877e-06, "loss": 0.9143, "step": 1327 },
{ "epoch": 0.69, "learning_rate": 4.499927238855287e-06, "loss": 1.521, "step": 1328 },
{ "epoch": 0.7, "learning_rate": 4.485775775903033e-06, "loss": 1.335, "step": 1329 },
{ "epoch": 0.7, "learning_rate": 4.471640163095229e-06, "loss": 0.9832, "step": 1330 },
{ "epoch": 0.7, "learning_rate": 4.457520441063428e-06, "loss": 1.2444, "step": 1331 },
{ "epoch": 0.7, "learning_rate": 4.443416650393509e-06, "loss": 1.1521, "step": 1332 },
{ "epoch": 0.7, "learning_rate": 4.429328831625565e-06, "loss": 1.3042, "step": 1333 },
{ "epoch": 0.7, "learning_rate": 4.415257025253758e-06, "loss": 0.9712, "step": 1334 },
{ "epoch": 0.7, "learning_rate": 4.40120127172625e-06, "loss": 1.1553, "step": 1335 },
{ "epoch": 0.7, "learning_rate": 4.387161611445034e-06, "loss": 1.4805, "step": 1336 },
{ "epoch": 0.7, "learning_rate": 4.373138084765875e-06, "loss": 1.7056, "step": 1337 },
{ "epoch": 0.7, "learning_rate": 4.359130731998127e-06, "loss": 1.8389, "step": 1338 },
{ "epoch": 0.7, "learning_rate": 4.345139593404688e-06, "loss": 1.2964, "step": 1339 },
{ "epoch": 0.7, "learning_rate": 4.331164709201829e-06, "loss": 1.0322, "step": 1340 },
{ "epoch": 0.7, "learning_rate": 4.317206119559108e-06, "loss": 1.252, "step": 1341 },
{ "epoch": 0.7, "learning_rate": 4.303263864599239e-06, "loss": 1.1704, "step": 1342 },
{ "epoch": 0.7, "learning_rate": 4.289337984397988e-06, "loss": 1.0161, "step": 1343 },
{ "epoch": 0.7, "learning_rate": 4.275428518984054e-06, "loss": 1.1133, "step": 1344 },
{ "epoch": 0.7, "learning_rate": 4.261535508338951e-06, "loss": 1.377, "step": 1345 },
{ "epoch": 0.7, "learning_rate": 4.2476589923968946e-06, "loss": 1.5605, "step": 1346 },
{ "epoch": 0.7, "learning_rate": 4.233799011044687e-06, "loss": 1.7144, "step": 1347 },
{ "epoch": 0.71, "learning_rate": 4.219955604121611e-06, "loss": 1.4209, "step": 1348 },
{ "epoch": 0.71, "learning_rate": 4.206128811419299e-06, "loss": 1.4873, "step": 1349 },
{ "epoch": 0.71, "learning_rate": 4.192318672681631e-06, "loss": 1.293, "step": 1350 },
{ "epoch": 0.71, "learning_rate": 4.178525227604615e-06, "loss": 1.105, "step": 1351 },
{ "epoch": 0.71, "learning_rate": 4.164748515836276e-06, "loss": 1.5015, "step": 1352 },
{ "epoch": 0.71, "learning_rate": 4.150988576976539e-06, "loss": 1.7388, "step": 1353 },
{ "epoch": 0.71, "learning_rate": 4.1372454505771186e-06, "loss": 1.6494, "step": 1354 },
{ "epoch": 0.71, "learning_rate": 4.1235191761414025e-06, "loss": 1.1504, "step": 1355 },
{ "epoch": 0.71, "learning_rate": 4.10980979312434e-06, "loss": 1.6826, "step": 1356 },
{ "epoch": 0.71, "learning_rate": 4.096117340932322e-06, "loss": 1.0933, "step": 1357 },
{ "epoch": 0.71, "learning_rate": 4.082441858923088e-06, "loss": 1.0, "step": 1358 },
{ "epoch": 0.71, "learning_rate": 4.068783386405577e-06, "loss": 1.3618, "step": 1359 },
{ "epoch": 0.71, "learning_rate": 4.055141962639858e-06, "loss": 1.5786, "step": 1360 },
{ "epoch": 0.71, "learning_rate": 4.0415176268369714e-06, "loss": 1.4453, "step": 1361 },
{ "epoch": 0.71, "learning_rate": 4.027910418158861e-06, "loss": 1.3398, "step": 1362 },
{ "epoch": 0.71, "learning_rate": 4.01432037571823e-06, "loss": 1.1853, "step": 1363 },
{ "epoch": 0.71, "learning_rate": 4.00074753857844e-06, "loss": 1.2422, "step": 1364 },
{ "epoch": 0.71, "learning_rate": 3.987191945753396e-06, "loss": 1.0767, "step": 1365 },
{ "epoch": 0.71, "learning_rate": 3.973653636207437e-06, "loss": 1.0293, "step": 1366 },
{ "epoch": 0.72, "learning_rate": 3.960132648855226e-06, "loss": 1.0918, "step": 1367 },
{ "epoch": 0.72, "learning_rate": 3.946629022561627e-06, "loss": 1.5762, "step": 1368 },
{ "epoch": 0.72, "learning_rate": 3.933142796141616e-06, "loss": 1.3801, "step": 1369 },
{ "epoch": 0.72, "learning_rate": 3.919674008360133e-06, "loss": 1.2039, "step": 1370 },
{ "epoch": 0.72, "learning_rate": 3.906222697932016e-06, "loss": 1.5615, "step": 1371 },
{ "epoch": 0.72, "learning_rate": 3.89278890352184e-06, "loss": 0.9858, "step": 1372 },
{ "epoch": 0.72, "learning_rate": 3.879372663743862e-06, "loss": 1.0029, "step": 1373 },
{ "epoch": 0.72, "learning_rate": 3.8659740171618485e-06, "loss": 1.5, "step": 1374 },
{ "epoch": 0.72, "learning_rate": 3.85259300228902e-06, "loss": 1.2031, "step": 1375 },
{ "epoch": 0.72, "learning_rate": 3.839229657587906e-06, "loss": 1.0615, "step": 1376 },
{ "epoch": 0.72, "learning_rate": 3.825884021470244e-06, "loss": 1.5557, "step": 1377 },
{ "epoch": 0.72, "learning_rate": 3.812556132296873e-06, "loss": 1.6191, "step": 1378 },
{ "epoch": 0.72, "learning_rate": 3.799246028377618e-06, "loss": 1.0586, "step": 1379 },
{ "epoch": 0.72, "learning_rate": 3.785953747971183e-06, "loss": 1.4219, "step": 1380 },
{ "epoch": 0.72, "learning_rate": 3.7726793292850382e-06, "loss": 1.6426, "step": 1381 },
{ "epoch": 0.72, "learning_rate": 3.7594228104753107e-06, "loss": 1.0276, "step": 1382 },
{ "epoch": 0.72, "learning_rate": 3.746184229646689e-06, "loss": 1.3828, "step": 1383 },
{ "epoch": 0.72, "learning_rate": 3.732963624852275e-06, "loss": 1.3428, "step": 1384 },
{ "epoch": 0.72, "learning_rate": 3.7197610340935263e-06, "loss": 1.5903, "step": 1385 },
{ "epoch": 0.73, "learning_rate": 3.7065764953201066e-06, "loss": 1.3765, "step": 1386 },
{ "epoch": 0.73, "learning_rate": 3.693410046429793e-06, "loss": 1.2174, "step": 1387 },
{ "epoch": 0.73, "learning_rate": 3.680261725268364e-06, "loss": 0.9263, "step": 1388 },
{ "epoch": 0.73, "learning_rate": 3.667131569629495e-06, "loss": 1.2881, "step": 1389 },
{ "epoch": 0.73, "learning_rate": 3.654019617254644e-06, "loss": 1.6083, "step": 1390 },
{ "epoch": 0.73, "learning_rate": 3.6409259058329404e-06, "loss": 1.2178, "step": 1391 },
{ "epoch": 0.73, "learning_rate": 3.6278504730011e-06, "loss": 0.9836, "step": 1392 },
{ "epoch": 0.73, "learning_rate": 3.614793356343269e-06, "loss": 0.9719, "step": 1393 },
{ "epoch": 0.73, "learning_rate": 3.601754593390978e-06, "loss": 1.5381, "step": 1394 },
{ "epoch": 0.73, "learning_rate": 3.5887342216229694e-06, "loss": 0.958, "step": 1395 },
{ "epoch": 0.73, "learning_rate": 3.5757322784651506e-06, "loss": 0.9895, "step": 1396 },
{ "epoch": 0.73, "learning_rate": 3.5627488012904333e-06, "loss": 1.1216, "step": 1397 },
{ "epoch": 0.73, "learning_rate": 3.5497838274186737e-06, "loss": 1.1196, "step": 1398 },
{ "epoch": 0.73, "learning_rate": 3.5368373941165167e-06, "loss": 1.4614, "step": 1399 },
{ "epoch": 0.73, "learning_rate": 3.5239095385973377e-06, "loss": 1.6934, "step": 1400 },
{ "epoch": 0.73, "learning_rate": 3.511000298021098e-06, "loss": 1.0481, "step": 1401 },
{ "epoch": 0.73, "learning_rate": 3.498109709494254e-06, "loss": 1.4438, "step": 1402 },
{ "epoch": 0.73, "learning_rate": 3.4852378100696505e-06, "loss": 1.5527, "step": 1403 },
{ "epoch": 0.73, "learning_rate": 3.472384636746412e-06, "loss": 1.1362, "step": 1404 },
{ "epoch": 0.74, "learning_rate": 3.4595502264698356e-06, "loss": 0.8755, "step": 1405 },
{ "epoch": 0.74, "learning_rate": 3.446734616131282e-06, "loss": 1.4756, "step": 1406 },
{ "epoch": 0.74, "learning_rate": 3.433937842568088e-06, "loss": 1.0354, "step": 1407 },
{ "epoch": 0.74, "learning_rate": 3.421159942563421e-06, "loss": 1.5942, "step": 1408 },
{ "epoch": 0.74, "learning_rate": 3.4084009528462227e-06, "loss": 1.2891, "step": 1409 },
{ "epoch": 0.74, "learning_rate": 3.3956609100910643e-06, "loss": 1.5771, "step": 1410 },
{ "epoch": 0.74, "learning_rate": 3.382939850918059e-06, "loss": 1.1787, "step": 1411 },
{ "epoch": 0.74, "learning_rate": 3.3702378118927537e-06, "loss": 1.5923, "step": 1412 },
{ "epoch": 0.74, "learning_rate": 3.357554829526025e-06, "loss": 1.5088, "step": 1413 },
{ "epoch": 0.74, "learning_rate": 3.3448909402739716e-06, "loss": 1.1001, "step": 1414 },
{ "epoch": 0.74, "learning_rate": 3.3322461805378094e-06, "loss": 1.7988, "step": 1415 },
{ "epoch": 0.74, "learning_rate": 3.3196205866637667e-06, "loss": 1.374, "step": 1416 },
{ "epoch": 0.74, "learning_rate": 3.3070141949429945e-06, "loss": 1.438, "step": 1417 },
{ "epoch": 0.74, "learning_rate": 3.2944270416114256e-06, "loss": 0.9634, "step": 1418 },
{ "epoch": 0.74, "learning_rate": 3.28185916284972e-06, "loss": 1.7334, "step": 1419 },
{ "epoch": 0.74, "learning_rate": 3.269310594783106e-06, "loss": 1.5957, "step": 1420 },
{ "epoch": 0.74, "learning_rate": 3.2567813734813336e-06, "loss": 1.6094, "step": 1421 },
{ "epoch": 0.74, "learning_rate": 3.244271534958523e-06, "loss": 1.4438, "step": 1422 },
{ "epoch": 0.74, "learning_rate": 3.231781115173088e-06, "loss": 1.6509, "step": 1423 },
{ "epoch": 0.75, "learning_rate": 3.2193101500276226e-06, "loss": 1.6836, "step": 1424 },
{ "epoch": 0.75, "learning_rate": 3.2068586753688014e-06, "loss": 0.8369, "step": 1425 },
{ "epoch": 0.75, "learning_rate": 3.1944267269872754e-06, "loss": 1.7139, "step": 1426 },
{ "epoch": 0.75, "learning_rate": 3.1820143406175642e-06, "loss": 1.3462, "step": 1427 },
{ "epoch": 0.75, "learning_rate": 3.1696215519379726e-06, "loss": 1.3364, "step": 1428 },
{ "epoch": 0.75, "learning_rate": 3.1572483965704503e-06, "loss": 1.5947, "step": 1429 },
{ "epoch": 0.75, "learning_rate": 3.1448949100805394e-06, "loss": 1.5063, "step": 1430 },
{ "epoch": 0.75, "learning_rate": 3.1325611279772194e-06, "loss": 1.2188, "step": 1431 },
{ "epoch": 0.75, "learning_rate": 3.1202470857128564e-06, "loss": 1.3438, "step": 1432 },
{ "epoch": 0.75, "learning_rate": 3.107952818683052e-06, "loss": 1.3848, "step": 1433 },
{ "epoch": 0.75, "learning_rate": 3.095678362226585e-06, "loss": 1.1208, "step": 1434 },
{ "epoch": 0.75, "learning_rate": 3.0834237516252817e-06, "loss": 1.4932, "step": 1435 },
{ "epoch": 0.75, "learning_rate": 3.071189022103923e-06, "loss": 1.4446, "step": 1436 },
{ "epoch": 0.75, "learning_rate": 3.058974208830147e-06, "loss": 1.2275, "step": 1437 },
{ "epoch": 0.75, "learning_rate": 3.046779346914339e-06, "loss": 1.1867, "step": 1438 },
{ "epoch": 0.75, "learning_rate": 3.0346044714095425e-06, "loss": 1.5039, "step": 1439 },
{ "epoch": 0.75, "learning_rate": 3.022449617311346e-06, "loss": 0.9829, "step": 1440 },
{ "epoch": 0.75, "learning_rate": 3.0103148195577914e-06, "loss": 1.5352, "step": 1441 },
{ "epoch": 0.75, "learning_rate": 2.9982001130292703e-06, "loss": 1.1475, "step": 1442 },
{ "epoch": 0.76, "learning_rate": 2.98610553254842e-06, "loss": 0.9854, "step": 1443 },
{ "epoch": 0.76, "learning_rate": 2.974031112880037e-06, "loss": 1.377, "step": 1444 },
{ "epoch": 0.76, "learning_rate": 2.961976888730956e-06, "loss": 1.5234, "step": 1445 },
{ "epoch": 0.76, "learning_rate": 2.9499428947499696e-06, "loss": 1.2074, "step": 1446 },
{ "epoch": 0.76, "learning_rate": 2.9379291655277143e-06, "loss": 1.6924, "step": 1447 },
{ "epoch": 0.76, "learning_rate": 2.925935735596582e-06, "loss": 1.5874, "step": 1448 },
{ "epoch": 0.76, "learning_rate": 2.913962639430615e-06, "loss": 1.5659, "step": 1449 },
{ "epoch": 0.76, "learning_rate": 2.9020099114454046e-06, "loss": 1.4717, "step": 1450 },
{ "epoch": 0.76, "learning_rate": 2.890077585998007e-06, "loss": 1.6348, "step": 1451 },
{ "epoch": 0.76, "learning_rate": 2.878165697386812e-06, "loss": 1.6162, "step": 1452 },
{ "epoch": 0.76, "learning_rate": 2.866274279851491e-06, "loss": 1.5415, "step": 1453 },
{ "epoch": 0.76, "learning_rate": 2.854403367572849e-06, "loss": 1.3303, "step": 1454 },
{ "epoch": 0.76, "learning_rate": 2.8425529946727725e-06, "loss": 1.0278, "step": 1455 },
{ "epoch": 0.76, "learning_rate": 2.830723195214088e-06, "loss": 0.9565, "step": 1456 },
{ "epoch": 0.76, "learning_rate": 2.818914003200508e-06, "loss": 1.2854, "step": 1457 },
{ "epoch": 0.76, "learning_rate": 2.807125452576487e-06, "loss": 0.9307, "step": 1458 },
{ "epoch": 0.76, "learning_rate": 2.7953575772271667e-06, "loss": 1.5864, "step": 1459 },
{ "epoch": 0.76, "learning_rate": 2.7836104109782525e-06, "loss": 1.7065, "step": 1460 },
{ "epoch": 0.76, "learning_rate": 2.7718839875959213e-06, "loss": 0.9014, "step": 1461 },
{ "epoch": 0.77, "learning_rate": 2.760178340786729e-06, "loss": 1.4844, "step": 1462 },
{ "epoch": 0.77, "learning_rate": 2.7484935041975113e-06, "loss": 1.7832, "step": 1463 },
{ "epoch": 0.77, "learning_rate": 2.7368295114152852e-06, "loss": 1.1343, "step": 1464 },
{ "epoch": 0.77, "learning_rate": 2.7251863959671533e-06, "loss": 1.1357, "step": 1465 },
{ "epoch": 0.77, "learning_rate": 2.713564191320217e-06, "loss": 1.6123, "step": 1466 },
{ "epoch": 0.77, "learning_rate": 2.7019629308814544e-06, "loss": 1.6113, "step": 1467 },
{ "epoch": 0.77, "learning_rate": 2.6903826479976582e-06, "loss": 1.0098, "step": 1468 },
{ "epoch": 0.77, "learning_rate": 2.678823375955314e-06, "loss": 1.3789, "step": 1469 },
{ "epoch": 0.77, "learning_rate": 2.667285147980515e-06, "loss": 0.9153, "step": 1470 },
{ "epoch": 0.77, "learning_rate": 2.6557679972388674e-06, "loss": 1.0869, "step": 1471 },
{ "epoch": 0.77, "learning_rate": 2.6442719568353894e-06, "loss": 0.9136, "step": 1472 },
{ "epoch": 0.77, "learning_rate": 2.6327970598144214e-06, "loss": 0.9133, "step": 1473 },
{ "epoch": 0.77, "learning_rate": 2.62134333915953e-06, "loss": 1.0286, "step": 1474 },
{ "epoch": 0.77, "learning_rate": 2.6099108277934105e-06, "loss": 1.4067, "step": 1475 },
{ "epoch": 0.77, "learning_rate": 2.598499558577794e-06, "loss": 1.5781, "step": 1476 },
{ "epoch": 0.77, "learning_rate": 2.5871095643133502e-06, "loss": 1.7129, "step": 1477 },
{ "epoch": 0.77, "learning_rate": 2.57574087773961e-06, "loss": 1.4795, "step": 1478 },
{ "epoch": 0.77, "learning_rate": 2.564393531534832e-06, "loss": 1.4468, "step": 1479 },
{ "epoch": 0.77, "learning_rate": 2.553067558315958e-06, "loss": 1.0122, "step": 1480 },
{ "epoch": 0.77, "learning_rate": 2.5417629906384834e-06, "loss": 1.3931, "step": 1481 },
{ "epoch": 0.78, "learning_rate": 2.5304798609963754e-06, "loss": 2.0332, "step": 1482 },
{ "epoch": 0.78, "learning_rate": 2.5192182018219826e-06, "loss": 1.2471, "step": 1483 },
{ "epoch": 0.78, "learning_rate": 2.507978045485937e-06, "loss": 1.3945, "step": 1484 },
{ "epoch": 0.78, "learning_rate": 2.496759424297062e-06, "loss": 0.9614, "step": 1485 },
{ "epoch": 0.78, "learning_rate": 2.485562370502279e-06, "loss": 1.5908, "step": 1486 },
{ "epoch": 0.78, "learning_rate": 2.474386916286526e-06, "loss": 1.22, "step": 1487 },
{ "epoch": 0.78, "learning_rate": 2.4632330937726356e-06, "loss": 0.9996, "step": 1488 },
{ "epoch": 0.78, "learning_rate": 2.4521009350212854e-06, "loss": 1.4648, "step": 1489 },
{ "epoch": 0.78, "learning_rate": 2.4409904720308576e-06, "loss": 1.4282, "step": 1490 },
{ "epoch": 0.78, "learning_rate": 2.4299017367373966e-06, "loss": 1.5293, "step": 1491 },
{ "epoch": 0.78, "learning_rate": 2.4188347610144693e-06, "loss": 1.2124, "step": 1492 },
{ "epoch": 0.78, "learning_rate": 2.407789576673115e-06, "loss": 1.5078, "step": 1493 },
{ "epoch": 0.78, "learning_rate": 2.396766215461728e-06, "loss": 1.4512, "step": 1494 },
{ "epoch": 0.78, "learning_rate": 2.385764709065973e-06, "loss": 1.2773, "step": 1495 },
{ "epoch": 0.78, "learning_rate": 2.3747850891086956e-06, "loss": 0.9736, "step": 1496 },
{ "epoch": 0.78, "learning_rate": 2.363827387149833e-06, "loss": 1.3001, "step": 1497 },
{ "epoch": 0.78, "learning_rate": 2.3528916346863185e-06, "loss": 1.499, "step": 1498 },
{ "epoch": 0.78, "learning_rate": 2.3419778631519963e-06, "loss": 1.5103, "step": 1499 },
{ "epoch": 0.78, "learning_rate": 2.331086103917525e-06, "loss": 1.6318, "step": 1500 },
{ "epoch": 0.79, "learning_rate": 2.320216388290293e-06, "loss": 1.0039, "step": 1501 },
{ "epoch": 0.79, "learning_rate": 2.3093687475143236e-06, "loss": 1.0791, "step": 1502 },
{ "epoch": 0.79, "learning_rate": 2.2985432127701945e-06, "loss": 1.1825, "step": 1503 },
{ "epoch": 0.79, "learning_rate": 2.2877398151749375e-06, "loss": 1.0322, "step": 1504 },
{ "epoch": 0.79, "learning_rate": 2.27695858578195e-06, "loss": 1.0483, "step": 1505 },
{ "epoch": 0.79, "learning_rate": 2.2661995555809123e-06, "loss": 1.0518, "step": 1506 },
{ "epoch": 0.79, "learning_rate": 2.255462755497696e-06, "loss": 1.3364, "step": 1507 },
{ "epoch": 0.79, "learning_rate": 2.2447482163942725e-06, "loss": 1.0654, "step": 1508 },
{ "epoch": 0.79, "learning_rate": 2.2340559690686247e-06, "loss": 0.9849, "step": 1509 },
{ "epoch": 0.79, "learning_rate": 2.223386044254663e-06, "loss": 1.1753, "step": 1510 },
{ "epoch": 0.79, "learning_rate": 2.2127384726221303e-06, "loss": 0.9249, "step": 1511 },
{ "epoch": 0.79, "learning_rate": 2.2021132847765246e-06, "loss": 1.4648, "step": 1512 },
{ "epoch": 0.79, "learning_rate": 2.1915105112589894e-06, "loss": 1.0049, "step": 1513 },
{ "epoch": 0.79, "learning_rate": 2.1809301825462593e-06, "loss": 1.0864, "step": 1514 },
{ "epoch": 0.79, "learning_rate": 2.170372329050533e-06, "loss": 1.5977, "step": 1515 },
{ "epoch": 0.79, "learning_rate": 2.159836981119425e-06, "loss": 1.4272, "step": 1516 },
{ "epoch": 0.79, "learning_rate": 2.1493241690358437e-06, "loss": 1.3457, "step": 1517 },
{ "epoch": 0.79, "learning_rate": 2.138833923017931e-06, "loss": 0.9218, "step": 1518 },
{ "epoch": 0.79, "learning_rate": 2.1283662732189624e-06, "loss": 1.0132, "step": 1519 },
{ "epoch": 0.8, "learning_rate": 2.1179212497272582e-06, "loss": 1.6523, "step": 1520 },
{ "epoch": 0.8, "learning_rate": 2.107498882566107e-06, "loss": 1.6416, "step": 1521 },
{ "epoch": 0.8, "learning_rate": 2.0970992016936685e-06, "loss": 1.5435, "step": 1522 },
{ "epoch": 0.8, "learning_rate": 2.0867222370028985e-06, "loss": 1.3032, "step": 1523 },
{ "epoch": 0.8, "learning_rate": 2.0763680183214485e-06, "loss": 1.479, "step": 1524 },
{ "epoch": 0.8, "learning_rate": 2.0660365754116042e-06, "loss": 1.5186, "step": 1525 },
{ "epoch": 0.8, "learning_rate": 2.0557279379701632e-06, "loss": 1.1572, "step": 1526 },
{ "epoch": 0.8, "learning_rate": 2.045442135628389e-06, "loss": 1.0177, "step": 1527 },
{ "epoch": 0.8, "learning_rate": 2.0351791979518997e-06, "loss": 1.0464, "step": 1528 },
{ "epoch": 0.8, "learning_rate": 2.0249391544405916e-06, "loss": 1.2803, "step": 1529 },
{ "epoch": 0.8, "learning_rate": 2.0147220345285524e-06, "loss": 1.2764, "step": 1530 },
{ "epoch": 0.8, "learning_rate": 2.004527867583982e-06, "loss": 1.1138, "step": 1531 },
{ "epoch": 0.8, "learning_rate": 1.9943566829091e-06, "loss": 1.4082, "step": 1532 },
{ "epoch": 0.8, "learning_rate": 1.9842085097400678e-06, "loss": 1.5366, "step": 1533 },
{ "epoch": 0.8, "learning_rate": 1.9740833772469025e-06, "loss": 1.7998, "step": 1534 },
{ "epoch": 0.8, "learning_rate": 1.963981314533391e-06, "loss": 1.5234, "step": 1535 },
{ "epoch": 0.8, "learning_rate": 1.953902350637006e-06, "loss": 1.1631, "step": 1536 },
{ "epoch": 0.8, "learning_rate": 1.9438465145288377e-06, "loss": 0.9619, "step": 1537 },
{ "epoch": 0.8, "learning_rate": 1.9338138351134773e-06, "loss": 1.5249, "step": 1538 },
{ "epoch": 0.81, "learning_rate": 1.9238043412289696e-06, "loss": 1.7227, "step": 1539 },
{ "epoch": 0.81, "learning_rate": 1.9138180616467105e-06, "loss": 1.6191, "step": 1540 },
{ "epoch": 0.81, "learning_rate": 1.903855025071366e-06, "loss": 0.9546, "step": 1541 },
{ "epoch": 0.81, "learning_rate": 1.8939152601407952e-06, "loss": 1.4814, "step": 1542 },
{ "epoch": 0.81, "learning_rate": 1.8839987954259609e-06, "loss": 1.5742, "step": 1543 },
{ "epoch": 0.81, "learning_rate": 1.8741056594308571e-06, "loss": 1.2129, "step": 1544 },
{ "epoch": 0.81, "learning_rate": 1.8642358805924144e-06, "loss": 1.6606, "step": 1545 },
{ "epoch": 0.81, "learning_rate": 1.8543894872804368e-06, "loss": 0.9497, "step": 1546 },
{ "epoch": 0.81, "learning_rate": 1.8445665077974928e-06,
|
"loss": 1.5234, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8347669703788683e-06, |
|
"loss": 0.8857, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.824990903192445e-06, |
|
"loss": 1.0371, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.815238334338667e-06, |
|
"loss": 1.0686, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8055092918504103e-06, |
|
"loss": 1.6206, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.795803803692947e-06, |
|
"loss": 0.9238, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7861218977638252e-06, |
|
"loss": 1.6865, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7764636018928249e-06, |
|
"loss": 1.0547, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.766828943841853e-06, |
|
"loss": 1.4316, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7572179513048703e-06, |
|
"loss": 1.103, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7476306519078168e-06, |
|
"loss": 1.2836, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.738067073208527e-06, |
|
"loss": 1.4331, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7285272426966526e-06, |
|
"loss": 1.6335, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7190111877935833e-06, |
|
"loss": 1.0571, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7095189358523668e-06, |
|
"loss": 1.5205, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.700050514157635e-06, |
|
"loss": 0.8805, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.690605949925519e-06, |
|
"loss": 1.6807, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6811852703035737e-06, |
|
"loss": 1.4707, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6717885023707004e-06, |
|
"loss": 1.5596, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6624156731370666e-06, |
|
"loss": 1.1389, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6530668095440339e-06, |
|
"loss": 1.1719, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.643741938464073e-06, |
|
"loss": 1.5112, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6344410867006944e-06, |
|
"loss": 1.0, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6251642809883595e-06, |
|
"loss": 1.1484, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6159115479924259e-06, |
|
"loss": 0.8926, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.606682914309038e-06, |
|
"loss": 1.2764, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.597478406465085e-06, |
|
"loss": 1.0288, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5882980509180956e-06, |
|
"loss": 0.9993, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.579141874056187e-06, |
|
"loss": 1.4414, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5700099021979642e-06, |
|
"loss": 1.7783, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5609021615924702e-06, |
|
"loss": 1.0283, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.551818678419087e-06, |
|
"loss": 1.4639, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5427594787874766e-06, |
|
"loss": 1.2378, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5337245887374962e-06, |
|
"loss": 1.3423, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.524714034239132e-06, |
|
"loss": 1.0757, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5157278411924148e-06, |
|
"loss": 0.9536, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5067660354273527e-06, |
|
"loss": 1.0134, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4978286427038602e-06, |
|
"loss": 1.0161, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4889156887116652e-06, |
|
"loss": 1.249, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4800271990702664e-06, |
|
"loss": 1.5391, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4711631993288222e-06, |
|
"loss": 1.3828, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.462323714966114e-06, |
|
"loss": 1.4619, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4535087713904484e-06, |
|
"loss": 1.3036, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4447183939395925e-06, |
|
"loss": 1.3345, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4359526078806985e-06, |
|
"loss": 1.8623, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4272114384102353e-06, |
|
"loss": 1.4834, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4184949106539137e-06, |
|
"loss": 1.5137, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4098030496666126e-06, |
|
"loss": 1.166, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4011358804323095e-06, |
|
"loss": 1.626, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3924934278640068e-06, |
|
"loss": 1.1748, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3838757168036598e-06, |
|
"loss": 1.0117, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3752827720221128e-06, |
|
"loss": 0.7277, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.366714618219015e-06, |
|
"loss": 1.4199, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3581712800227576e-06, |
|
"loss": 1.6641, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3496527819904038e-06, |
|
"loss": 1.3232, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3411591486076136e-06, |
|
"loss": 1.3037, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3326904042885757e-06, |
|
"loss": 0.8843, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.324246573375938e-06, |
|
"loss": 1.3393, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3158276801407432e-06, |
|
"loss": 1.4644, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3074337487823374e-06, |
|
"loss": 0.9814, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.299064803428336e-06, |
|
"loss": 1.0357, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.290720868134515e-06, |
|
"loss": 1.3647, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2824019668847775e-06, |
|
"loss": 1.3374, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2741081235910547e-06, |
|
"loss": 1.1929, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2658393620932652e-06, |
|
"loss": 0.8875, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2575957061592159e-06, |
|
"loss": 1.1807, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2493771794845644e-06, |
|
"loss": 1.293, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2411838056927295e-06, |
|
"loss": 1.2812, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.233015608334831e-06, |
|
"loss": 1.0635, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2248726108896225e-06, |
|
"loss": 1.2139, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.216754836763422e-06, |
|
"loss": 1.4155, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2086623092900441e-06, |
|
"loss": 0.9958, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2005950517307374e-06, |
|
"loss": 1.5215, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1925530872741098e-06, |
|
"loss": 1.1436, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1845364390360692e-06, |
|
"loss": 1.3711, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1765451300597574e-06, |
|
"loss": 1.4653, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1685791833154748e-06, |
|
"loss": 1.3442, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.160638621700624e-06, |
|
"loss": 1.0979, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1527234680396392e-06, |
|
"loss": 1.2544, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1448337450839208e-06, |
|
"loss": 1.2729, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1369694755117733e-06, |
|
"loss": 1.4575, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1291306819283353e-06, |
|
"loss": 1.2195, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.121317386865518e-06, |
|
"loss": 1.6055, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1135296127819407e-06, |
|
"loss": 1.0593, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1057673820628612e-06, |
|
"loss": 1.626, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.0980307170201222e-06, |
|
"loss": 1.2798, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.090319639892069e-06, |
|
"loss": 1.543, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0826341728435096e-06, |
|
"loss": 1.0747, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0749743379656263e-06, |
|
"loss": 1.4746, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0673401572759335e-06, |
|
"loss": 1.6484, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0597316527182e-06, |
|
"loss": 1.0044, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0521488461623919e-06, |
|
"loss": 1.3647, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0445917594046073e-06, |
|
"loss": 1.4268, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.037060414167017e-06, |
|
"loss": 1.4287, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.029554832097801e-06, |
|
"loss": 1.103, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0220750347710795e-06, |
|
"loss": 1.0405, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0146210436868675e-06, |
|
"loss": 1.007, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0071928802709886e-06, |
|
"loss": 1.6016, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.997905658750428e-07, |
|
"loss": 1.0405, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.924141217763117e-07, |
|
"loss": 1.5381, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.8506356917773e-07, |
|
"loss": 1.2305, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.777389292078033e-07, |
|
"loss": 1.1289, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.704402229205534e-07, |
|
"loss": 1.1543, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.631674712954586e-07, |
|
"loss": 1.7832, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.559206952373933e-07, |
|
"loss": 1.645, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.486999155765675e-07, |
|
"loss": 1.5034, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.415051530684671e-07, |
|
"loss": 1.6152, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.343364283937928e-07, |
|
"loss": 0.8618, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.271937621584026e-07, |
|
"loss": 1.6191, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.200771748932513e-07, |
|
"loss": 0.9402, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.129866870543347e-07, |
|
"loss": 0.8081, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.059223190226251e-07, |
|
"loss": 1.459, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.988840911040165e-07, |
|
"loss": 0.906, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.918720235292655e-07, |
|
"loss": 1.3604, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.84886136453933e-07, |
|
"loss": 1.0078, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.779264499583284e-07, |
|
"loss": 0.9829, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.709929840474463e-07, |
|
"loss": 1.5176, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.640857586509155e-07, |
|
"loss": 1.3147, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.572047936229355e-07, |
|
"loss": 1.2969, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.503501087422305e-07, |
|
"loss": 1.6924, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.435217237119719e-07, |
|
"loss": 1.5505, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.367196581597503e-07, |
|
"loss": 1.4619, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.299439316374858e-07, |
|
"loss": 0.9233, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.231945636214068e-07, |
|
"loss": 1.6211, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.164715735119599e-07, |
|
"loss": 1.1113, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.097749806337851e-07, |
|
"loss": 1.2598, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.031048042356393e-07, |
|
"loss": 1.3989, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.964610634903458e-07, |
|
"loss": 1.0234, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.898437774947442e-07, |
|
"loss": 1.1946, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.832529652696319e-07, |
|
"loss": 0.7934, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.766886457597089e-07, |
|
"loss": 1.1946, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.701508378335243e-07, |
|
"loss": 1.4277, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.636395602834223e-07, |
|
"loss": 1.7637, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.571548318254874e-07, |
|
"loss": 1.1538, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.506966710994945e-07, |
|
"loss": 0.9434, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.442650966688469e-07, |
|
"loss": 1.541, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.378601270205299e-07, |
|
"loss": 1.6367, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.314817805650576e-07, |
|
"loss": 1.5957, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.251300756364154e-07, |
|
"loss": 1.6123, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.188050304920113e-07, |
|
"loss": 1.4922, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.125066633126221e-07, |
|
"loss": 1.4092, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.062349922023414e-07, |
|
"loss": 1.3467, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.999900351885258e-07, |
|
"loss": 1.0742, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.937718102217461e-07, |
|
"loss": 1.4021, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.875803351757371e-07, |
|
"loss": 1.5396, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.814156278473338e-07, |
|
"loss": 0.842, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.752777059564431e-07, |
|
"loss": 1.5283, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.691665871459674e-07, |
|
"loss": 1.6631, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.630822889817734e-07, |
|
"loss": 1.2393, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.570248289526326e-07, |
|
"loss": 1.6309, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.509942244701717e-07, |
|
"loss": 1.5322, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.449904928688244e-07, |
|
"loss": 1.083, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.390136514057788e-07, |
|
"loss": 1.1235, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.330637172609333e-07, |
|
"loss": 1.5137, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.271407075368385e-07, |
|
"loss": 1.5005, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.212446392586613e-07, |
|
"loss": 1.5918, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.15375529374116e-07, |
|
"loss": 1.415, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.095333947534421e-07, |
|
"loss": 1.1992, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.037182521893247e-07, |
|
"loss": 1.3105, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.979301183968789e-07, |
|
"loss": 1.3635, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.921690100135713e-07, |
|
"loss": 1.5068, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.864349435991978e-07, |
|
"loss": 0.9624, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.807279356358164e-07, |
|
"loss": 1.7383, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.750480025277116e-07, |
|
"loss": 1.3501, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.693951606013448e-07, |
|
"loss": 1.6592, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.637694261053029e-07, |
|
"loss": 0.8959, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.581708152102583e-07, |
|
"loss": 1.397, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.525993440089161e-07, |
|
"loss": 1.3096, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.470550285159704e-07, |
|
"loss": 1.4268, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.415378846680641e-07, |
|
"loss": 1.1704, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.360479283237319e-07, |
|
"loss": 0.9492, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.305851752633628e-07, |
|
"loss": 1.2798, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.251496411891521e-07, |
|
"loss": 1.4629, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.197413417250563e-07, |
|
"loss": 0.9551, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.143602924167479e-07, |
|
"loss": 1.2041, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.090065087315721e-07, |
|
"loss": 0.9448, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.036800060585023e-07, |
|
"loss": 1.4932, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.983807997080925e-07, |
|
"loss": 1.4126, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.931089049124405e-07, |
|
"loss": 1.0737, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.878643368251323e-07, |
|
"loss": 1.5322, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.826471105212149e-07, |
|
"loss": 1.4697, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.774572409971334e-07, |
|
"loss": 0.9136, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.722947431707092e-07, |
|
"loss": 0.953, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.671596318810745e-07, |
|
"loss": 1.0669, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.620519218886543e-07, |
|
"loss": 1.4785, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.5697162787509686e-07, |
|
"loss": 1.3447, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.5191876444325986e-07, |
|
"loss": 1.0176, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.468933461171432e-07, |
|
"loss": 1.314, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.418953873418641e-07, |
|
"loss": 1.5366, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.3692490248360755e-07, |
|
"loss": 0.8837, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.319819058295893e-07, |
|
"loss": 1.4487, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.2706641158800855e-07, |
|
"loss": 1.0796, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.221784338880153e-07, |
|
"loss": 1.0431, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.173179867796662e-07, |
|
"loss": 1.373, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.124850842338779e-07, |
|
"loss": 1.2847, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.076797401423993e-07, |
|
"loss": 1.2471, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0290196831776264e-07, |
|
"loss": 1.0312, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.981517824932446e-07, |
|
"loss": 1.214, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.9342919632283093e-07, |
|
"loss": 1.4346, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8873422338117084e-07, |
|
"loss": 1.4993, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8406687716354695e-07, |
|
"loss": 0.8267, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7942717108582663e-07, |
|
"loss": 1.0364, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.748151184844295e-07, |
|
"loss": 0.7578, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.702307326162924e-07, |
|
"loss": 1.5908, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.656740266588177e-07, |
|
"loss": 1.2075, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.611450137098538e-07, |
|
"loss": 1.0254, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5664370678763825e-07, |
|
"loss": 1.2026, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5217011883077914e-07, |
|
"loss": 1.3481, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.477242626982047e-07, |
|
"loss": 0.9426, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4330615116912933e-07, |
|
"loss": 1.4053, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.389157969430201e-07, |
|
"loss": 1.4771, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.345532126395579e-07, |
|
"loss": 1.6562, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.302184107986017e-07, |
|
"loss": 0.9888, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.259114038801503e-07, |
|
"loss": 1.2437, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.216322042643127e-07, |
|
"loss": 1.5208, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.17380824251261e-07, |
|
"loss": 0.9585, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1315727606121226e-07, |
|
"loss": 1.3574, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0896157183437305e-07, |
|
"loss": 1.4399, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0479372363092506e-07, |
|
"loss": 1.7236, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0065374343096974e-07, |
|
"loss": 1.5107, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.965416431345147e-07, |
|
"loss": 1.3484, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9245743456142283e-07, |
|
"loss": 1.6045, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8840112945138553e-07, |
|
"loss": 1.335, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8437273946388954e-07, |
|
"loss": 1.4961, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.803722761781824e-07, |
|
"loss": 1.6963, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.763997510932381e-07, |
|
"loss": 1.1687, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.7245517562772385e-07, |
|
"loss": 1.5439, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.685385611199698e-07, |
|
"loss": 1.437, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.646499188279328e-07, |
|
"loss": 1.5298, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.607892599291695e-07, |
|
"loss": 1.5469, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5695659552079866e-07, |
|
"loss": 1.3394, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5315193661947013e-07, |
|
"loss": 0.9432, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.493752941613348e-07, |
|
"loss": 1.3271, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4562667900201677e-07, |
|
"loss": 1.3784, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4190610191657246e-07, |
|
"loss": 1.3979, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3821357359946838e-07, |
|
"loss": 1.0366, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.345491046645465e-07, |
|
"loss": 1.5308, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.309127056449989e-07, |
|
"loss": 1.4268, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.2730438699332446e-07, |
|
"loss": 1.52, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.237241590813155e-07, |
|
"loss": 1.3594, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.2017203220001559e-07, |
|
"loss": 1.0269, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1664801655969735e-07, |
|
"loss": 1.5781, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1315212228982584e-07, |
|
"loss": 0.9819, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0968435943904075e-07, |
|
"loss": 1.3218, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.062447379751109e-07, |
|
"loss": 1.4053, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.028332677849254e-07, |
|
"loss": 1.5898, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9944995867444806e-07, |
|
"loss": 1.5996, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9609482036869853e-07, |
|
"loss": 1.5527, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.927678625117224e-07, |
|
"loss": 0.9492, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8946909466656115e-07, |
|
"loss": 1.1089, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8619852631523105e-07, |
|
"loss": 1.7178, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8295616685868434e-07, |
|
"loss": 1.3276, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.797420256167981e-07, |
|
"loss": 1.0581, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7655611182832767e-07, |
|
"loss": 1.4346, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7339843465090322e-07, |
|
"loss": 0.9688, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7026900316098217e-07, |
|
"loss": 0.8027, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6716782635383677e-07, |
|
"loss": 0.9595, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.640949131435221e-07, |
|
"loss": 1.5947, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6105027236285265e-07, |
|
"loss": 1.2339, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5803391276337565e-07, |
|
"loss": 1.3301, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5504584301534677e-07, |
|
"loss": 1.6084, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.520860717077055e-07, |
|
"loss": 1.417, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.49154607348051e-07, |
|
"loss": 1.5889, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.462514583626129e-07, |
|
"loss": 1.3867, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4337663309623718e-07, |
|
"loss": 1.2871, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.405301398123482e-07, |
|
"loss": 1.2622, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3771198669293885e-07, |
|
"loss": 1.7227, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3492218183853712e-07, |
|
"loss": 1.6777, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3216073326818736e-07, |
|
"loss": 1.5366, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2942764891942793e-07, |
|
"loss": 1.5947, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2672293664826364e-07, |
|
"loss": 1.5918, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2404660422914993e-07, |
|
"loss": 1.4131, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.213986593549621e-07, |
|
"loss": 1.6553, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1877910963698502e-07, |
|
"loss": 1.0903, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1618796260487675e-07, |
|
"loss": 1.4561, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1362522570665945e-07, |
|
"loss": 1.2747, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1109090630868958e-07, |
|
"loss": 0.9189, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0858501169564439e-07, |
|
"loss": 1.0294, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0610754907049104e-07, |
|
"loss": 1.2437, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0365852555447642e-07, |
|
"loss": 1.4097, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0123794818709732e-07, |
|
"loss": 1.5571, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.884582392608921e-08, |
|
"loss": 1.0684, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.64821596473986e-08, |
|
"loss": 1.3369, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.414696214516628e-08, |
|
"loss": 1.0073, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.184023813170961e-08, |
|
"loss": 0.988, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.956199423750033e-08, |
|
"loss": 0.8818, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.731223701114566e-08, |
|
"loss": 1.6055, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.509097291937273e-08, |
|
"loss": 1.4214, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.289820834700757e-08, |
|
"loss": 1.5654, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.073394959695391e-08, |
|
"loss": 1.4844, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.859820289018105e-08, |
|
"loss": 1.1074, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.649097436570164e-08, |
|
"loss": 1.3979, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.441227008055607e-08, |
|
"loss": 1.1084, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.23620960097926e-08, |
|
"loss": 0.7485, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.034045804645506e-08, |
|
"loss": 1.0649, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.834736200155956e-08, |
|
"loss": 1.2095, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.638281360408339e-08, |
|
"loss": 1.3506, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.444681850094392e-08, |
|
"loss": 1.3369, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.253938225698863e-08, |
|
"loss": 0.9146, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.066051035496956e-08, |
|
"loss": 1.0986, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.881020819553773e-08, |
|
"loss": 1.4478, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.6988481097219886e-08, |
|
"loss": 1.2568, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.519533429640955e-08, |
|
"loss": 1.1743, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.343077294734378e-08, |
|
"loss": 1.5654, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.1694802122099763e-08, |
|
"loss": 1.4131, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.9987426810569337e-08, |
|
"loss": 1.4297, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.8308651920448976e-08, |
|
"loss": 1.0591, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.665848227722869e-08, |
|
"loss": 1.117, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.503692262417203e-08, |
|
"loss": 1.6279, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.344397762230834e-08, |
|
"loss": 1.5068, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.1879651850414984e-08, |
|
"loss": 1.4214, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.0343949805006224e-08, |
|
"loss": 1.5132, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.8836875900321036e-08, |
|
"loss": 1.4668, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.735843446830867e-08, |
|
"loss": 1.3789, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.590862975861642e-08, |
|
"loss": 1.0439, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.448746593857744e-08, |
|
"loss": 1.5684, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.309494709320182e-08, |
|
"loss": 1.4878, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.173107722515889e-08, |
|
"loss": 1.0449, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.0395860254772705e-08, |
|
"loss": 1.0469, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.9089300020002097e-08, |
|
"loss": 0.9731, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.781140027643958e-08, |
|
"loss": 1.3989, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.656216469729356e-08, |
|
"loss": 1.2993, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.5341596873379447e-08, |
|
"loss": 1.0331, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.4149700313110813e-08, |
|
"loss": 1.6216, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.2986478442486026e-08, |
|
"loss": 1.3672, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.1851934605086057e-08, |
|
"loss": 1.3789, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.074607206205226e-08, |
|
"loss": 1.5688, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.96688939920886e-08, |
|
"loss": 1.0527, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8620403491448334e-08, |
|
"loss": 1.6191, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7600603573921794e-08, |
|
"loss": 1.4985, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.6609497170834154e-08, |
|
"loss": 1.3945, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5647087131031023e-08, |
|
"loss": 1.4707, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4713376220875097e-08, |
|
"loss": 1.0735, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3808367124233945e-08, |
|
"loss": 1.2207, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2932062442475579e-08, |
|
"loss": 1.7051, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2084464694459563e-08, |
|
"loss": 1.5303, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1265576316530358e-08, |
|
"loss": 1.0596, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0475399662508434e-08, |
|
"loss": 1.0874, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.713937003685836e-09, |
|
"loss": 1.3599, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.981190528819517e-09, |
|
"loss": 1.5312, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.277162344123568e-09, |
|
"loss": 1.5804, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.60185447326478e-09, |
|
"loss": 1.4702, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.955268857353759e-09, |
|
"loss": 1.4707, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.3374073549438144e-09, |
|
"loss": 1.3647, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.748271742023192e-09, |
|
"loss": 1.561, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.187863712007302e-09, |
|
"loss": 1.5264, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.6561848757420425e-09, |
|
"loss": 1.5068, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.153236761488266e-09, |
|
"loss": 1.5322, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.679020814926215e-09, |
|
"loss": 1.394, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.2335383991477507e-09, |
|
"loss": 1.0673, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.8167907946519134e-09, |
|
"loss": 1.1443, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.428779199341591e-09, |
|
"loss": 1.7559, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.069504728522409e-09, |
|
"loss": 1.1357, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7389684148960694e-09, |
|
"loss": 1.2192, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4371712085603506e-09, |
|
"loss": 1.2759, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.164113977003556e-09, |
|
"loss": 1.2883, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.197975051045138e-10, |
|
"loss": 1.4995, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.042224951292475e-10, |
|
"loss": 1.0126, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.173895667287543e-10, |
|
"loss": 0.9268, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.5929925693789504e-10, |
|
"loss": 1.0811, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.2995202017317418e-10, |
|
"loss": 1.2202, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.2934822823051917e-10, |
|
"loss": 1.3159, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.748817028750075e-11, |
|
"loss": 1.5366, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.4372052900002432e-11, |
|
"loss": 1.2114, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.5439, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1911, |
|
"total_flos": 1.3552964313840353e+19, |
|
"train_loss": 1.450651798617584, |
|
"train_runtime": 41416.7741, |
|
"train_samples_per_second": 0.369, |
|
"train_steps_per_second": 0.046 |
|
} |
|
], |
|
"max_steps": 1911, |
|
"num_train_epochs": 1, |
|
"total_flos": 1.3552964313840353e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |