{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 127988,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "learning_rate": 4.980466918773635e-05, "loss": 3.8391, "step": 500},
    {"epoch": 0.03, "learning_rate": 4.96093383754727e-05, "loss": 3.5549, "step": 1000},
    {"epoch": 0.05, "learning_rate": 4.941400756320905e-05, "loss": 3.5792, "step": 1500},
    {"epoch": 0.06, "learning_rate": 4.9218676750945406e-05, "loss": 3.4807, "step": 2000},
    {"epoch": 0.08, "learning_rate": 4.9023345938681755e-05, "loss": 5.5711, "step": 2500},
    {"epoch": 0.09, "learning_rate": 4.8828015126418104e-05, "loss": 3.4924, "step": 3000},
    {"epoch": 0.11, "learning_rate": 4.863268431415445e-05, "loss": 3.3686, "step": 3500},
    {"epoch": 0.13, "learning_rate": 4.84373535018908e-05, "loss": 3.3857, "step": 4000},
    {"epoch": 0.14, "learning_rate": 4.824202268962715e-05, "loss": 3.3547, "step": 4500},
    {"epoch": 0.16, "learning_rate": 4.80466918773635e-05, "loss": 3.3625, "step": 5000},
    {"epoch": 0.17, "learning_rate": 4.7851361065099856e-05, "loss": 3.3164, "step": 5500},
    {"epoch": 0.19, "learning_rate": 4.7656030252836205e-05, "loss": 3.301, "step": 6000},
    {"epoch": 0.2, "learning_rate": 4.7460699440572554e-05, "loss": 3.1746, "step": 6500},
    {"epoch": 0.22, "learning_rate": 4.7265368628308904e-05, "loss": 3.2548, "step": 7000},
    {"epoch": 0.23, "learning_rate": 4.707003781604525e-05, "loss": 3.1595, "step": 7500},
    {"epoch": 0.25, "learning_rate": 4.687470700378161e-05, "loss": 3.2356, "step": 8000},
    {"epoch": 0.27, "learning_rate": 4.667937619151796e-05, "loss": 3.2533, "step": 8500},
    {"epoch": 0.28, "learning_rate": 4.648404537925431e-05, "loss": 3.2142, "step": 9000},
    {"epoch": 0.3, "learning_rate": 4.628871456699066e-05, "loss": 3.1645, "step": 9500},
    {"epoch": 0.31, "learning_rate": 4.609338375472701e-05, "loss": 3.2479, "step": 10000},
    {"epoch": 0.33, "learning_rate": 4.589805294246336e-05, "loss": 3.2195, "step": 10500},
    {"epoch": 0.34, "learning_rate": 4.570272213019971e-05, "loss": 3.1258, "step": 11000},
    {"epoch": 0.36, "learning_rate": 4.550739131793606e-05, "loss": 3.1771, "step": 11500},
    {"epoch": 0.38, "learning_rate": 4.531206050567241e-05, "loss": 3.2118, "step": 12000},
    {"epoch": 0.39, "learning_rate": 4.511672969340876e-05, "loss": 3.101, "step": 12500},
    {"epoch": 0.41, "learning_rate": 4.492139888114511e-05, "loss": 3.129, "step": 13000},
    {"epoch": 0.42, "learning_rate": 4.472606806888146e-05, "loss": 3.1021, "step": 13500},
    {"epoch": 0.44, "learning_rate": 4.453073725661781e-05, "loss": 3.1185, "step": 14000},
    {"epoch": 0.45, "learning_rate": 4.433540644435416e-05, "loss": 3.1291, "step": 14500},
    {"epoch": 0.47, "learning_rate": 4.414007563209051e-05, "loss": 3.0354, "step": 15000},
    {"epoch": 0.48, "learning_rate": 4.394474481982686e-05, "loss": 3.0164, "step": 15500},
    {"epoch": 0.5, "learning_rate": 4.374941400756321e-05, "loss": 3.0837, "step": 16000},
    {"epoch": 0.52, "learning_rate": 4.3554083195299564e-05, "loss": 3.0235, "step": 16500},
    {"epoch": 0.53, "learning_rate": 4.335875238303591e-05, "loss": 3.0046, "step": 17000},
    {"epoch": 0.55, "learning_rate": 4.316342157077226e-05, "loss": 3.0702, "step": 17500},
    {"epoch": 0.56, "learning_rate": 4.296809075850861e-05, "loss": 3.0582, "step": 18000},
    {"epoch": 0.58, "learning_rate": 4.277275994624496e-05, "loss": 3.039, "step": 18500},
    {"epoch": 0.59, "learning_rate": 4.257742913398131e-05, "loss": 3.1094, "step": 19000},
    {"epoch": 0.61, "learning_rate": 4.238209832171766e-05, "loss": 2.9548, "step": 19500},
    {"epoch": 0.63, "learning_rate": 4.2186767509454014e-05, "loss": 3.0493, "step": 20000},
    {"epoch": 0.64, "learning_rate": 4.1991436697190363e-05, "loss": 3.0473, "step": 20500},
    {"epoch": 0.66, "learning_rate": 4.179610588492671e-05, "loss": 3.0363, "step": 21000},
    {"epoch": 0.67, "learning_rate": 4.160077507266306e-05, "loss": 2.9445, "step": 21500},
    {"epoch": 0.69, "learning_rate": 4.140544426039941e-05, "loss": 2.9368, "step": 22000},
    {"epoch": 0.7, "learning_rate": 4.121011344813576e-05, "loss": 2.9829, "step": 22500},
    {"epoch": 0.72, "learning_rate": 4.1014782635872116e-05, "loss": 3.0427, "step": 23000},
    {"epoch": 0.73, "learning_rate": 4.0819451823608465e-05, "loss": 2.974, "step": 23500},
    {"epoch": 0.75, "learning_rate": 4.062412101134482e-05, "loss": 2.9314, "step": 24000},
    {"epoch": 0.77, "learning_rate": 4.042879019908117e-05, "loss": 2.9788, "step": 24500},
    {"epoch": 0.78, "learning_rate": 4.023345938681752e-05, "loss": 2.9794, "step": 25000},
    {"epoch": 0.8, "learning_rate": 4.003812857455387e-05, "loss": 2.9851, "step": 25500},
    {"epoch": 0.81, "learning_rate": 3.984279776229022e-05, "loss": 2.9453, "step": 26000},
    {"epoch": 0.83, "learning_rate": 3.9647466950026566e-05, "loss": 3.0433, "step": 26500},
    {"epoch": 0.84, "learning_rate": 3.9452136137762916e-05, "loss": 2.956, "step": 27000},
    {"epoch": 0.86, "learning_rate": 3.925680532549927e-05, "loss": 2.9388, "step": 27500},
    {"epoch": 0.88, "learning_rate": 3.906147451323562e-05, "loss": 2.9308, "step": 28000},
    {"epoch": 0.89, "learning_rate": 3.886614370097197e-05, "loss": 2.9588, "step": 28500},
    {"epoch": 0.91, "learning_rate": 3.867081288870832e-05, "loss": 2.9588, "step": 29000},
    {"epoch": 0.92, "learning_rate": 3.847548207644467e-05, "loss": 2.8812, "step": 29500},
    {"epoch": 0.94, "learning_rate": 3.828015126418102e-05, "loss": 2.9103, "step": 30000},
    {"epoch": 0.95, "learning_rate": 3.8084820451917366e-05, "loss": 2.8671, "step": 30500},
    {"epoch": 0.97, "learning_rate": 3.788948963965372e-05, "loss": 2.868, "step": 31000},
    {"epoch": 0.98, "learning_rate": 3.769415882739007e-05, "loss": 2.9499, "step": 31500},
    {"epoch": 1.0, "learning_rate": 3.749882801512642e-05, "loss": 2.9272, "step": 32000},
    {"epoch": 1.02, "learning_rate": 3.730349720286277e-05, "loss": 2.3079, "step": 32500},
    {"epoch": 1.03, "learning_rate": 3.710816639059912e-05, "loss": 2.3296, "step": 33000},
    {"epoch": 1.05, "learning_rate": 3.691283557833547e-05, "loss": 2.3636, "step": 33500},
    {"epoch": 1.06, "learning_rate": 3.671750476607182e-05, "loss": 2.3879, "step": 34000},
    {"epoch": 1.08, "learning_rate": 3.652217395380817e-05, "loss": 2.3653, "step": 34500},
    {"epoch": 1.09, "learning_rate": 3.632684314154452e-05, "loss": 2.2813, "step": 35000},
    {"epoch": 1.11, "learning_rate": 3.613151232928087e-05, "loss": 2.3776, "step": 35500},
    {"epoch": 1.13, "learning_rate": 3.593618151701722e-05, "loss": 2.3478, "step": 36000},
    {"epoch": 1.14, "learning_rate": 3.574085070475357e-05, "loss": 2.3453, "step": 36500},
    {"epoch": 1.16, "learning_rate": 3.554551989248992e-05, "loss": 2.3145, "step": 37000},
    {"epoch": 1.17, "learning_rate": 3.535018908022627e-05, "loss": 2.3296, "step": 37500},
    {"epoch": 1.19, "learning_rate": 3.515485826796262e-05, "loss": 2.3543, "step": 38000},
    {"epoch": 1.2, "learning_rate": 3.495952745569898e-05, "loss": 2.289, "step": 38500},
    {"epoch": 1.22, "learning_rate": 3.476419664343533e-05, "loss": 2.4305, "step": 39000},
    {"epoch": 1.23, "learning_rate": 3.456886583117168e-05, "loss": 2.3672, "step": 39500},
    {"epoch": 1.25, "learning_rate": 3.4373535018908026e-05, "loss": 2.3041, "step": 40000},
    {"epoch": 1.27, "learning_rate": 3.4178204206644375e-05, "loss": 2.3282, "step": 40500},
    {"epoch": 1.28, "learning_rate": 3.3982873394380725e-05, "loss": 2.3631, "step": 41000},
    {"epoch": 1.3, "learning_rate": 3.3787542582117074e-05, "loss": 2.2899, "step": 41500},
    {"epoch": 1.31, "learning_rate": 3.359221176985343e-05, "loss": 2.3022, "step": 42000},
    {"epoch": 1.33, "learning_rate": 3.339688095758978e-05, "loss": 2.3512, "step": 42500},
    {"epoch": 1.34, "learning_rate": 3.320155014532613e-05, "loss": 2.3304, "step": 43000},
    {"epoch": 1.36, "learning_rate": 3.300621933306248e-05, "loss": 2.3307, "step": 43500},
    {"epoch": 1.38, "learning_rate": 3.2810888520798826e-05, "loss": 2.3752, "step": 44000},
    {"epoch": 1.39, "learning_rate": 3.2615557708535175e-05, "loss": 2.2553, "step": 44500},
    {"epoch": 1.41, "learning_rate": 3.2420226896271524e-05, "loss": 2.3906, "step": 45000},
    {"epoch": 1.42, "learning_rate": 3.222489608400788e-05, "loss": 2.3277, "step": 45500},
    {"epoch": 1.44, "learning_rate": 3.202956527174423e-05, "loss": 2.3515, "step": 46000},
    {"epoch": 1.45, "learning_rate": 3.183423445948058e-05, "loss": 2.3409, "step": 46500},
    {"epoch": 1.47, "learning_rate": 3.163890364721693e-05, "loss": 2.3093, "step": 47000},
    {"epoch": 1.48, "learning_rate": 3.1443572834953277e-05, "loss": 2.2961, "step": 47500},
    {"epoch": 1.5, "learning_rate": 3.1248242022689626e-05, "loss": 2.2775, "step": 48000},
    {"epoch": 1.52, "learning_rate": 3.1052911210425975e-05, "loss": 2.2714, "step": 48500},
    {"epoch": 1.53, "learning_rate": 3.085758039816233e-05, "loss": 2.3634, "step": 49000},
    {"epoch": 1.55, "learning_rate": 3.066224958589868e-05, "loss": 2.353, "step": 49500},
    {"epoch": 1.56, "learning_rate": 3.046691877363503e-05, "loss": 2.3295, "step": 50000},
    {"epoch": 1.58, "learning_rate": 3.0271587961371378e-05, "loss": 2.3014, "step": 50500},
    {"epoch": 1.59, "learning_rate": 3.0076257149107727e-05, "loss": 2.319, "step": 51000},
    {"epoch": 1.61, "learning_rate": 2.988092633684408e-05, "loss": 2.3364, "step": 51500},
    {"epoch": 1.63, "learning_rate": 2.968559552458043e-05, "loss": 2.3935, "step": 52000},
    {"epoch": 1.64, "learning_rate": 2.9490264712316778e-05, "loss": 2.379, "step": 52500},
    {"epoch": 1.66, "learning_rate": 2.9294933900053127e-05, "loss": 2.284, "step": 53000},
    {"epoch": 1.67, "learning_rate": 2.9099603087789483e-05, "loss": 2.317, "step": 53500},
    {"epoch": 1.69, "learning_rate": 2.8904272275525835e-05, "loss": 2.3177, "step": 54000},
    {"epoch": 1.7, "learning_rate": 2.8708941463262185e-05, "loss": 2.3619, "step": 54500},
    {"epoch": 1.72, "learning_rate": 2.8513610650998534e-05, "loss": 2.3052, "step": 55000},
    {"epoch": 1.73, "learning_rate": 2.8318279838734886e-05, "loss": 2.3617, "step": 55500},
    {"epoch": 1.75, "learning_rate": 2.8122949026471235e-05, "loss": 2.3074, "step": 56000},
    {"epoch": 1.77, "learning_rate": 2.7927618214207584e-05, "loss": 2.3894, "step": 56500},
    {"epoch": 1.78, "learning_rate": 2.7732287401943933e-05, "loss": 2.3208, "step": 57000},
    {"epoch": 1.8, "learning_rate": 2.7536956589680286e-05, "loss": 2.3569, "step": 57500},
    {"epoch": 1.81, "learning_rate": 2.7341625777416635e-05, "loss": 2.4341, "step": 58000},
    {"epoch": 1.83, "learning_rate": 2.7146294965152984e-05, "loss": 2.246, "step": 58500},
    {"epoch": 1.84, "learning_rate": 2.6950964152889337e-05, "loss": 2.2938, "step": 59000},
    {"epoch": 1.86, "learning_rate": 2.6755633340625686e-05, "loss": 2.3378, "step": 59500},
    {"epoch": 1.88, "learning_rate": 2.6560302528362035e-05, "loss": 2.297, "step": 60000},
    {"epoch": 1.89, "learning_rate": 2.6364971716098384e-05, "loss": 2.3859, "step": 60500},
    {"epoch": 1.91, "learning_rate": 2.6169640903834737e-05, "loss": 2.4035, "step": 61000},
    {"epoch": 1.92, "learning_rate": 2.5974310091571086e-05, "loss": 2.3027, "step": 61500},
    {"epoch": 1.94, "learning_rate": 2.5778979279307435e-05, "loss": 2.2821, "step": 62000},
    {"epoch": 1.95, "learning_rate": 2.5583648467043787e-05, "loss": 2.3267, "step": 62500},
    {"epoch": 1.97, "learning_rate": 2.5388317654780136e-05, "loss": 2.3205, "step": 63000},
    {"epoch": 1.98, "learning_rate": 2.5192986842516485e-05, "loss": 2.3695, "step": 63500},
    {"epoch": 2.0, "learning_rate": 2.4997656030252835e-05, "loss": 2.3184, "step": 64000},
    {"epoch": 2.02, "learning_rate": 2.4802325217989187e-05, "loss": 1.6629, "step": 64500},
    {"epoch": 2.03, "learning_rate": 2.460699440572554e-05, "loss": 1.6359, "step": 65000},
    {"epoch": 2.05, "learning_rate": 2.441166359346189e-05, "loss": 1.5733, "step": 65500},
    {"epoch": 2.06, "learning_rate": 2.4216332781198238e-05, "loss": 1.5338, "step": 66000},
    {"epoch": 2.08, "learning_rate": 2.402100196893459e-05, "loss": 1.5802, "step": 66500},
    {"epoch": 2.09, "learning_rate": 2.382567115667094e-05, "loss": 1.5904, "step": 67000},
    {"epoch": 2.11, "learning_rate": 2.363034034440729e-05, "loss": 1.6188, "step": 67500},
    {"epoch": 2.13, "learning_rate": 2.343500953214364e-05, "loss": 1.6242, "step": 68000},
    {"epoch": 2.14, "learning_rate": 2.323967871987999e-05, "loss": 1.6195, "step": 68500},
    {"epoch": 2.16, "learning_rate": 2.304434790761634e-05, "loss": 1.5721, "step": 69000},
    {"epoch": 2.17, "learning_rate": 2.284901709535269e-05, "loss": 1.6324, "step": 69500},
    {"epoch": 2.19, "learning_rate": 2.265368628308904e-05, "loss": 1.5727, "step": 70000},
    {"epoch": 2.2, "learning_rate": 2.245835547082539e-05, "loss": 1.5782, "step": 70500},
    {"epoch": 2.22, "learning_rate": 2.226302465856174e-05, "loss": 1.5516, "step": 71000},
    {"epoch": 2.23, "learning_rate": 2.206769384629809e-05, "loss": 1.6206, "step": 71500},
    {"epoch": 2.25, "learning_rate": 2.187236303403444e-05, "loss": 1.6315, "step": 72000},
    {"epoch": 2.27, "learning_rate": 2.1677032221770793e-05, "loss": 1.6529, "step": 72500},
    {"epoch": 2.28, "learning_rate": 2.1481701409507142e-05, "loss": 1.5855, "step": 73000},
    {"epoch": 2.3, "learning_rate": 2.1286370597243495e-05, "loss": 1.565, "step": 73500},
    {"epoch": 2.31, "learning_rate": 2.1091039784979844e-05, "loss": 1.621, "step": 74000},
    {"epoch": 2.33, "learning_rate": 2.0895708972716193e-05, "loss": 1.6038, "step": 74500},
    {"epoch": 2.34, "learning_rate": 2.0700378160452542e-05, "loss": 1.6686, "step": 75000},
    {"epoch": 2.36, "learning_rate": 2.0505047348188895e-05, "loss": 1.6477, "step": 75500},
    {"epoch": 2.38, "learning_rate": 2.0309716535925244e-05, "loss": 1.6113, "step": 76000},
    {"epoch": 2.39, "learning_rate": 2.0114385723661593e-05, "loss": 1.6444, "step": 76500},
    {"epoch": 2.41, "learning_rate": 1.9919054911397945e-05, "loss": 1.5774, "step": 77000},
    {"epoch": 2.42, "learning_rate": 1.9723724099134295e-05, "loss": 1.6326, "step": 77500},
    {"epoch": 2.44, "learning_rate": 1.9528393286870644e-05, "loss": 1.689, "step": 78000},
    {"epoch": 2.45, "learning_rate": 1.9333062474606993e-05, "loss": 1.5713, "step": 78500},
    {"epoch": 2.47, "learning_rate": 1.9137731662343345e-05, "loss": 1.6681, "step": 79000},
    {"epoch": 2.48, "learning_rate": 1.8942400850079694e-05, "loss": 1.6577, "step": 79500},
    {"epoch": 2.5, "learning_rate": 1.8747070037816047e-05, "loss": 1.6331, "step": 80000},
    {"epoch": 2.52, "learning_rate": 1.8551739225552396e-05, "loss": 1.6913, "step": 80500},
    {"epoch": 2.53, "learning_rate": 1.835640841328875e-05, "loss": 1.5789, "step": 81000},
    {"epoch": 2.55, "learning_rate": 1.8161077601025098e-05, "loss": 1.6053, "step": 81500},
    {"epoch": 2.56, "learning_rate": 1.7965746788761447e-05, "loss": 1.6313, "step": 82000},
    {"epoch": 2.58, "learning_rate": 1.77704159764978e-05, "loss": 1.6604, "step": 82500},
    {"epoch": 2.59, "learning_rate": 1.757508516423415e-05, "loss": 1.6661, "step": 83000},
    {"epoch": 2.61, "learning_rate": 1.7379754351970497e-05, "loss": 1.7325, "step": 83500},
    {"epoch": 2.63, "learning_rate": 1.7184423539706847e-05, "loss": 1.6432, "step": 84000},
    {"epoch": 2.64, "learning_rate": 1.69890927274432e-05, "loss": 1.6206, "step": 84500},
    {"epoch": 2.66, "learning_rate": 1.6793761915179548e-05, "loss": 1.5495, "step": 85000},
    {"epoch": 2.67, "learning_rate": 1.6598431102915897e-05, "loss": 1.6021, "step": 85500},
    {"epoch": 2.69, "learning_rate": 1.640310029065225e-05, "loss": 1.6682, "step": 86000},
    {"epoch": 2.7, "learning_rate": 1.62077694783886e-05, "loss": 1.6304, "step": 86500},
    {"epoch": 2.72, "learning_rate": 1.6012438666124948e-05, "loss": 1.6224, "step": 87000},
    {"epoch": 2.73, "learning_rate": 1.58171078538613e-05, "loss": 1.5918, "step": 87500},
    {"epoch": 2.75, "learning_rate": 1.5621777041597653e-05, "loss": 1.6452, "step": 88000},
    {"epoch": 2.77, "learning_rate": 1.5426446229334002e-05, "loss": 1.5782, "step": 88500},
    {"epoch": 2.78, "learning_rate": 1.5231115417070351e-05, "loss": 1.5876, "step": 89000},
    {"epoch": 2.8, "learning_rate": 1.5035784604806702e-05, "loss": 1.6174, "step": 89500},
    {"epoch": 2.81, "learning_rate": 1.4840453792543051e-05, "loss": 1.6148, "step": 90000},
    {"epoch": 2.83, "learning_rate": 1.4645122980279402e-05, "loss": 1.6081, "step": 90500},
    {"epoch": 2.84, "learning_rate": 1.4449792168015753e-05, "loss": 1.5744, "step": 91000},
    {"epoch": 2.86, "learning_rate": 1.4254461355752102e-05, "loss": 1.6439, "step": 91500},
    {"epoch": 2.88, "learning_rate": 1.4059130543488453e-05, "loss": 1.665, "step": 92000},
    {"epoch": 2.89, "learning_rate": 1.3863799731224802e-05, "loss": 1.5745, "step": 92500},
    {"epoch": 2.91, "learning_rate": 1.3668468918961153e-05, "loss": 1.6089, "step": 93000},
    {"epoch": 2.92, "learning_rate": 1.3473138106697503e-05, "loss": 1.586, "step": 93500},
    {"epoch": 2.94, "learning_rate": 1.3277807294433853e-05, "loss": 1.6731, "step": 94000},
    {"epoch": 2.95, "learning_rate": 1.3082476482170203e-05, "loss": 1.5726, "step": 94500},
    {"epoch": 2.97, "learning_rate": 1.2887145669906556e-05, "loss": 1.5644, "step": 95000},
    {"epoch": 2.98, "learning_rate": 1.2691814857642905e-05, "loss": 1.5762, "step": 95500},
    {"epoch": 3.0, "learning_rate": 1.2496484045379254e-05, "loss": 1.5657, "step": 96000},
    {"epoch": 3.02, "learning_rate": 1.2301153233115605e-05, "loss": 0.9433, "step": 96500},
    {"epoch": 3.03, "learning_rate": 1.2105822420851956e-05, "loss": 0.946, "step": 97000},
    {"epoch": 3.05, "learning_rate": 1.1910491608588307e-05, "loss": 0.9428, "step": 97500},
    {"epoch": 3.06, "learning_rate": 1.1715160796324656e-05, "loss": 0.8946, "step": 98000},
    {"epoch": 3.08, "learning_rate": 1.1519829984061006e-05, "loss": 0.9151, "step": 98500},
    {"epoch": 3.09, "learning_rate": 1.1324499171797357e-05, "loss": 0.9333, "step": 99000},
    {"epoch": 3.11, "learning_rate": 1.1129168359533706e-05, "loss": 0.9164, "step": 99500},
    {"epoch": 3.13, "learning_rate": 1.0933837547270057e-05, "loss": 0.9953, "step": 100000},
    {"epoch": 3.14, "learning_rate": 1.0738506735006406e-05, "loss": 0.9017, "step": 100500},
    {"epoch": 3.16, "learning_rate": 1.0543175922742759e-05, "loss": 0.9446, "step": 101000},
    {"epoch": 3.17, "learning_rate": 1.0347845110479108e-05, "loss": 0.9605, "step": 101500},
    {"epoch": 3.19, "learning_rate": 1.0152514298215459e-05, "loss": 0.9269, "step": 102000},
    {"epoch": 3.2, "learning_rate": 9.957183485951808e-06, "loss": 0.9677, "step": 102500},
    {"epoch": 3.22, "learning_rate": 9.761852673688159e-06, "loss": 0.9406, "step": 103000},
    {"epoch": 3.23, "learning_rate": 9.56652186142451e-06, "loss": 0.9382, "step": 103500},
    {"epoch": 3.25, "learning_rate": 9.371191049160859e-06, "loss": 0.9711, "step": 104000},
    {"epoch": 3.27, "learning_rate": 9.175860236897211e-06, "loss": 0.9505, "step": 104500},
    {"epoch": 3.28, "learning_rate": 8.98052942463356e-06, "loss": 0.9686, "step": 105000},
    {"epoch": 3.3, "learning_rate": 8.785198612369911e-06, "loss": 0.9394, "step": 105500},
    {"epoch": 3.31, "learning_rate": 8.58986780010626e-06, "loss": 0.9612, "step": 106000},
    {"epoch": 3.33, "learning_rate": 8.394536987842611e-06, "loss": 0.968, "step": 106500},
    {"epoch": 3.34, "learning_rate": 8.19920617557896e-06, "loss": 0.9907, "step": 107000},
    {"epoch": 3.36, "learning_rate": 8.00387536331531e-06, "loss": 0.964, "step": 107500},
    {"epoch": 3.38, "learning_rate": 7.808544551051662e-06, "loss": 0.9707, "step": 108000},
    {"epoch": 3.39, "learning_rate": 7.613213738788012e-06, "loss": 0.9111, "step": 108500},
    {"epoch": 3.41, "learning_rate": 7.417882926524362e-06, "loss": 0.9294, "step": 109000},
    {"epoch": 3.42, "learning_rate": 7.222552114260712e-06, "loss": 0.9288, "step": 109500},
    {"epoch": 3.44, "learning_rate": 7.027221301997062e-06, "loss": 0.9456, "step": 110000},
    {"epoch": 3.45, "learning_rate": 6.831890489733412e-06, "loss": 0.9388, "step": 110500},
    {"epoch": 3.47, "learning_rate": 6.636559677469763e-06, "loss": 0.9385, "step": 111000},
    {"epoch": 3.48, "learning_rate": 6.441228865206113e-06, "loss": 0.9457, "step": 111500},
    {"epoch": 3.5, "learning_rate": 6.245898052942464e-06, "loss": 0.9371, "step": 112000},
    {"epoch": 3.52, "learning_rate": 6.050567240678814e-06, "loss": 0.892, "step": 112500},
    {"epoch": 3.53, "learning_rate": 5.855236428415165e-06, "loss": 0.9104, "step": 113000},
    {"epoch": 3.55, "learning_rate": 5.6599056161515146e-06, "loss": 0.9317, "step": 113500},
    {"epoch": 3.56, "learning_rate": 5.4645748038878645e-06, "loss": 0.8673, "step": 114000},
    {"epoch": 3.58, "learning_rate": 5.2692439916242145e-06, "loss": 0.9329, "step": 114500},
    {"epoch": 3.59, "learning_rate": 5.073913179360565e-06, "loss": 0.9171, "step": 115000},
    {"epoch": 3.61, "learning_rate": 4.878582367096915e-06, "loss": 0.8971, "step": 115500},
    {"epoch": 3.63, "learning_rate": 4.683251554833266e-06, "loss": 0.9205, "step": 116000},
    {"epoch": 3.64, "learning_rate": 4.487920742569616e-06, "loss": 0.9046, "step": 116500},
    {"epoch": 3.66, "learning_rate": 4.292589930305967e-06, "loss": 0.9163, "step": 117000},
    {"epoch": 3.67, "learning_rate": 4.097259118042317e-06, "loss": 0.9241, "step": 117500},
    {"epoch": 3.69, "learning_rate": 3.901928305778667e-06, "loss": 0.8993, "step": 118000},
    {"epoch": 3.7, "learning_rate": 3.7065974935150167e-06, "loss": 0.8975, "step": 118500},
    {"epoch": 3.72, "learning_rate": 3.5112666812513675e-06, "loss": 0.9508, "step": 119000},
    {"epoch": 3.73, "learning_rate": 3.315935868987718e-06, "loss": 0.8833, "step": 119500},
    {"epoch": 3.75, "learning_rate": 3.120605056724068e-06, "loss": 0.941, "step": 120000},
    {"epoch": 3.77, "learning_rate": 2.9252742444604182e-06, "loss": 0.9286, "step": 120500},
    {"epoch": 3.78, "learning_rate": 2.7299434321967686e-06, "loss": 0.9103, "step": 121000},
    {"epoch": 3.8, "learning_rate": 2.534612619933119e-06, "loss": 0.9002, "step": 121500},
    {"epoch": 3.81, "learning_rate": 2.3392818076694694e-06, "loss": 0.8605, "step": 122000},
    {"epoch": 3.83, "learning_rate": 2.1439509954058193e-06, "loss": 0.8555, "step": 122500},
    {"epoch": 3.84, "learning_rate": 1.9486201831421697e-06, "loss": 0.8961, "step": 123000},
    {"epoch": 3.86, "learning_rate": 1.75328937087852e-06, "loss": 0.9079, "step": 123500},
    {"epoch": 3.88, "learning_rate": 1.55795855861487e-06, "loss": 0.9456, "step": 124000},
    {"epoch": 3.89, "learning_rate": 1.3626277463512205e-06, "loss": 0.905, "step": 124500},
    {"epoch": 3.91, "learning_rate": 1.1672969340875708e-06, "loss": 0.8558, "step": 125000},
    {"epoch": 3.92, "learning_rate": 9.71966121823921e-07, "loss": 0.9055, "step": 125500},
    {"epoch": 3.94, "learning_rate": 7.766353095602713e-07, "loss": 0.9165, "step": 126000},
    {"epoch": 3.95, "learning_rate": 5.813044972966216e-07, "loss": 0.9449, "step": 126500},
    {"epoch": 3.97, "learning_rate": 3.8597368503297185e-07, "loss": 0.8955, "step": 127000},
    {"epoch": 3.98, "learning_rate": 1.9064287276932213e-07, "loss": 0.9181, "step": 127500},
    {"epoch": 4.0, "step": 127988, "total_flos": 7.906061868859392e+16, "train_loss": 2.0082999333540426, "train_runtime": 37501.9411, "train_samples_per_second": 3.413, "train_steps_per_second": 3.413}
  ],
  "max_steps": 127988,
  "num_train_epochs": 4,
  "total_flos": 7.906061868859392e+16,
  "trial_name": null,
  "trial_params": null
}