{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.988913525498892,
  "eval_steps": 500,
  "global_step": 1125,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004434589800443459,
      "grad_norm": 1.9132194519042969,
      "learning_rate": 1.7699115044247788e-06,
      "loss": 2.8127,
      "step": 1
    },
    {
      "epoch": 0.022172949002217297,
      "grad_norm": 2.0755808353424072,
      "learning_rate": 8.849557522123894e-06,
      "loss": 2.8248,
      "step": 5
    },
    {
      "epoch": 0.04434589800443459,
      "grad_norm": 1.6995130777359009,
      "learning_rate": 1.7699115044247787e-05,
      "loss": 2.7381,
      "step": 10
    },
    {
      "epoch": 0.06651884700665188,
      "grad_norm": 1.6350675821304321,
      "learning_rate": 2.6548672566371686e-05,
      "loss": 2.6572,
      "step": 15
    },
    {
      "epoch": 0.08869179600886919,
      "grad_norm": 1.2029041051864624,
      "learning_rate": 3.5398230088495574e-05,
      "loss": 2.4881,
      "step": 20
    },
    {
      "epoch": 0.11086474501108648,
      "grad_norm": 0.7936518788337708,
      "learning_rate": 4.4247787610619477e-05,
      "loss": 2.3403,
      "step": 25
    },
    {
      "epoch": 0.13303769401330376,
      "grad_norm": 0.8098437190055847,
      "learning_rate": 5.309734513274337e-05,
      "loss": 2.2186,
      "step": 30
    },
    {
      "epoch": 0.15521064301552107,
      "grad_norm": 0.6613564491271973,
      "learning_rate": 6.194690265486725e-05,
      "loss": 2.1041,
      "step": 35
    },
    {
      "epoch": 0.17738359201773837,
      "grad_norm": 0.6659165620803833,
      "learning_rate": 7.079646017699115e-05,
      "loss": 1.9928,
      "step": 40
    },
    {
      "epoch": 0.19955654101995565,
      "grad_norm": 0.559091329574585,
      "learning_rate": 7.964601769911504e-05,
      "loss": 1.9216,
      "step": 45
    },
    {
      "epoch": 0.22172949002217296,
      "grad_norm": 0.4300543963909149,
      "learning_rate": 8.849557522123895e-05,
      "loss": 1.8657,
      "step": 50
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 0.34058740735054016,
      "learning_rate": 9.734513274336283e-05,
      "loss": 1.7914,
      "step": 55
    },
    {
      "epoch": 0.2660753880266075,
      "grad_norm": 0.34278205037117004,
      "learning_rate": 0.00010619469026548674,
      "loss": 1.7762,
      "step": 60
    },
    {
      "epoch": 0.28824833702882485,
      "grad_norm": 0.31179192662239075,
      "learning_rate": 0.00011504424778761063,
      "loss": 1.7351,
      "step": 65
    },
    {
      "epoch": 0.31042128603104213,
      "grad_norm": 0.3422749936580658,
      "learning_rate": 0.0001238938053097345,
      "loss": 1.6966,
      "step": 70
    },
    {
      "epoch": 0.3325942350332594,
      "grad_norm": 0.4129348397254944,
      "learning_rate": 0.00013274336283185842,
      "loss": 1.6817,
      "step": 75
    },
    {
      "epoch": 0.35476718403547675,
      "grad_norm": 0.48307937383651733,
      "learning_rate": 0.0001415929203539823,
      "loss": 1.6672,
      "step": 80
    },
    {
      "epoch": 0.376940133037694,
      "grad_norm": 0.5420916676521301,
      "learning_rate": 0.00015044247787610618,
      "loss": 1.6174,
      "step": 85
    },
    {
      "epoch": 0.3991130820399113,
      "grad_norm": 0.41327813267707825,
      "learning_rate": 0.0001592920353982301,
      "loss": 1.5966,
      "step": 90
    },
    {
      "epoch": 0.4212860310421286,
      "grad_norm": 0.4299337565898895,
      "learning_rate": 0.000168141592920354,
      "loss": 1.569,
      "step": 95
    },
    {
      "epoch": 0.4434589800443459,
      "grad_norm": 0.3761642575263977,
      "learning_rate": 0.0001769911504424779,
      "loss": 1.5668,
      "step": 100
    },
    {
      "epoch": 0.4656319290465632,
      "grad_norm": 0.41222497820854187,
      "learning_rate": 0.0001858407079646018,
      "loss": 1.5515,
      "step": 105
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 0.4026060402393341,
      "learning_rate": 0.00019469026548672567,
      "loss": 1.5341,
      "step": 110
    },
    {
      "epoch": 0.5099778270509978,
      "grad_norm": 0.36704277992248535,
      "learning_rate": 0.00019999807262012045,
      "loss": 1.536,
      "step": 115
    },
    {
      "epoch": 0.532150776053215,
      "grad_norm": 0.33943068981170654,
      "learning_rate": 0.00019997639044970784,
      "loss": 1.5084,
      "step": 120
    },
    {
      "epoch": 0.5543237250554324,
      "grad_norm": 0.42166343331336975,
      "learning_rate": 0.00019993062212508053,
      "loss": 1.5056,
      "step": 125
    },
    {
      "epoch": 0.5764966740576497,
      "grad_norm": 0.3949909806251526,
      "learning_rate": 0.00019986077867267113,
      "loss": 1.4979,
      "step": 130
    },
    {
      "epoch": 0.5986696230598669,
      "grad_norm": 0.47220396995544434,
      "learning_rate": 0.00019976687691905393,
      "loss": 1.4935,
      "step": 135
    },
    {
      "epoch": 0.6208425720620843,
      "grad_norm": 0.4970516860485077,
      "learning_rate": 0.00019964893948689122,
      "loss": 1.5058,
      "step": 140
    },
    {
      "epoch": 0.6430155210643016,
      "grad_norm": 0.40719351172447205,
      "learning_rate": 0.00019950699478948309,
      "loss": 1.4937,
      "step": 145
    },
    {
      "epoch": 0.6651884700665188,
      "grad_norm": 0.3738563358783722,
      "learning_rate": 0.000199341077023922,
      "loss": 1.4998,
      "step": 150
    },
    {
      "epoch": 0.6873614190687362,
      "grad_norm": 0.3738692104816437,
      "learning_rate": 0.00019915122616285418,
      "loss": 1.4868,
      "step": 155
    },
    {
      "epoch": 0.7095343680709535,
      "grad_norm": 0.3409136235713959,
      "learning_rate": 0.00019893748794484948,
      "loss": 1.4755,
      "step": 160
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 0.36116743087768555,
      "learning_rate": 0.0001986999138633821,
      "loss": 1.4555,
      "step": 165
    },
    {
      "epoch": 0.753880266075388,
      "grad_norm": 0.38903379440307617,
      "learning_rate": 0.00019843856115442482,
      "loss": 1.4628,
      "step": 170
    },
    {
      "epoch": 0.7760532150776053,
      "grad_norm": 0.42216047644615173,
      "learning_rate": 0.00019815349278265988,
      "loss": 1.4725,
      "step": 175
    },
    {
      "epoch": 0.7982261640798226,
      "grad_norm": 0.3521736264228821,
      "learning_rate": 0.00019784477742630952,
      "loss": 1.4695,
      "step": 180
    },
    {
      "epoch": 0.8203991130820399,
      "grad_norm": 0.3384956419467926,
      "learning_rate": 0.00019751248946059014,
      "loss": 1.4512,
      "step": 185
    },
    {
      "epoch": 0.8425720620842572,
      "grad_norm": 0.35126349329948425,
      "learning_rate": 0.00019715670893979414,
      "loss": 1.4509,
      "step": 190
    },
    {
      "epoch": 0.8647450110864745,
      "grad_norm": 0.37433576583862305,
      "learning_rate": 0.00019677752157800312,
      "loss": 1.4386,
      "step": 195
    },
    {
      "epoch": 0.8869179600886918,
      "grad_norm": 0.39587318897247314,
      "learning_rate": 0.0001963750187284379,
      "loss": 1.4529,
      "step": 200
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.403945654630661,
      "learning_rate": 0.00019594929736144976,
      "loss": 1.4368,
      "step": 205
    },
    {
      "epoch": 0.9312638580931264,
      "grad_norm": 0.3656717538833618,
      "learning_rate": 0.0001955004600411586,
      "loss": 1.4208,
      "step": 210
    },
    {
      "epoch": 0.9534368070953437,
      "grad_norm": 0.409030556678772,
      "learning_rate": 0.0001950286149007434,
      "loss": 1.4151,
      "step": 215
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 0.4138728678226471,
      "learning_rate": 0.0001945338756163907,
      "loss": 1.4441,
      "step": 220
    },
    {
      "epoch": 0.9977827050997783,
      "grad_norm": 0.4125191271305084,
      "learning_rate": 0.00019401636137990816,
      "loss": 1.4429,
      "step": 225
    },
    {
      "epoch": 0.9977827050997783,
      "eval_loss": 1.7484289407730103,
      "eval_runtime": 0.374,
      "eval_samples_per_second": 2.674,
      "eval_steps_per_second": 2.674,
      "step": 225
    },
    {
      "epoch": 1.0199556541019956,
      "grad_norm": 0.498823344707489,
      "learning_rate": 0.00019347619687000892,
      "loss": 1.4079,
      "step": 230
    },
    {
      "epoch": 1.042128603104213,
      "grad_norm": 0.3439851999282837,
      "learning_rate": 0.00019291351222227432,
      "loss": 1.4058,
      "step": 235
    },
    {
      "epoch": 1.06430155210643,
      "grad_norm": 0.38840946555137634,
      "learning_rate": 0.0001923284429978017,
      "loss": 1.4204,
      "step": 240
    },
    {
      "epoch": 1.0864745011086474,
      "grad_norm": 0.34416335821151733,
      "learning_rate": 0.00019172113015054532,
      "loss": 1.4056,
      "step": 245
    },
    {
      "epoch": 1.1086474501108647,
      "grad_norm": 0.3941856324672699,
      "learning_rate": 0.00019109171999335793,
      "loss": 1.4249,
      "step": 250
    },
    {
      "epoch": 1.130820399113082,
      "grad_norm": 0.36946943402290344,
      "learning_rate": 0.00019044036416274133,
      "loss": 1.4083,
      "step": 255
    },
    {
      "epoch": 1.1529933481152994,
      "grad_norm": 0.3524357080459595,
      "learning_rate": 0.00018976721958231438,
      "loss": 1.3953,
      "step": 260
    },
    {
      "epoch": 1.1751662971175167,
      "grad_norm": 0.39715054631233215,
      "learning_rate": 0.00018907244842500704,
      "loss": 1.4021,
      "step": 265
    },
    {
      "epoch": 1.1973392461197339,
      "grad_norm": 0.43194007873535156,
      "learning_rate": 0.00018835621807399016,
      "loss": 1.4057,
      "step": 270
    },
    {
      "epoch": 1.2195121951219512,
      "grad_norm": 0.38655465841293335,
      "learning_rate": 0.0001876187010823496,
      "loss": 1.3816,
      "step": 275
    },
    {
      "epoch": 1.2416851441241685,
      "grad_norm": 0.330555260181427,
      "learning_rate": 0.00018686007513151514,
      "loss": 1.3913,
      "step": 280
    },
    {
      "epoch": 1.2638580931263859,
      "grad_norm": 0.34880608320236206,
      "learning_rate": 0.0001860805229884536,
      "loss": 1.407,
      "step": 285
    },
    {
      "epoch": 1.2860310421286032,
      "grad_norm": 0.38133811950683594,
      "learning_rate": 0.00018528023246163717,
      "loss": 1.3834,
      "step": 290
    },
    {
      "epoch": 1.3082039911308203,
      "grad_norm": 0.36675792932510376,
      "learning_rate": 0.00018445939635579656,
      "loss": 1.377,
      "step": 295
    },
    {
      "epoch": 1.3303769401330376,
      "grad_norm": 0.383324533700943,
      "learning_rate": 0.0001836182124254711,
      "loss": 1.3904,
      "step": 300
    },
    {
      "epoch": 1.352549889135255,
      "grad_norm": 0.47266829013824463,
      "learning_rate": 0.00018275688332736577,
      "loss": 1.3839,
      "step": 305
    },
    {
      "epoch": 1.3747228381374723,
      "grad_norm": 0.36780834197998047,
      "learning_rate": 0.00018187561657152757,
      "loss": 1.3729,
      "step": 310
    },
    {
      "epoch": 1.3968957871396896,
      "grad_norm": 0.35294458270072937,
      "learning_rate": 0.00018097462447135273,
      "loss": 1.386,
      "step": 315
    },
    {
      "epoch": 1.4190687361419068,
      "grad_norm": 0.36432820558547974,
      "learning_rate": 0.00018005412409243606,
      "loss": 1.3678,
      "step": 320
    },
    {
      "epoch": 1.441241685144124,
      "grad_norm": 0.38247236609458923,
      "learning_rate": 0.00017911433720027624,
      "loss": 1.3612,
      "step": 325
    },
    {
      "epoch": 1.4634146341463414,
      "grad_norm": 0.4539274275302887,
      "learning_rate": 0.00017815549020684825,
      "loss": 1.3778,
      "step": 330
    },
    {
      "epoch": 1.4855875831485588,
      "grad_norm": 0.39669978618621826,
      "learning_rate": 0.0001771778141160566,
      "loss": 1.38,
      "step": 335
    },
    {
      "epoch": 1.507760532150776,
      "grad_norm": 0.3531462550163269,
      "learning_rate": 0.0001761815444680822,
      "loss": 1.3805,
      "step": 340
    },
    {
      "epoch": 1.5299334811529932,
      "grad_norm": 0.3824242055416107,
      "learning_rate": 0.00017516692128263648,
      "loss": 1.3764,
      "step": 345
    },
    {
      "epoch": 1.5521064301552108,
      "grad_norm": 0.3946467936038971,
      "learning_rate": 0.00017413418900113605,
      "loss": 1.3678,
      "step": 350
    },
    {
      "epoch": 1.5742793791574279,
      "grad_norm": 0.38375967741012573,
      "learning_rate": 0.00017308359642781242,
      "loss": 1.3767,
      "step": 355
    },
    {
      "epoch": 1.5964523281596452,
      "grad_norm": 0.418390154838562,
      "learning_rate": 0.00017201539666977043,
      "loss": 1.3734,
      "step": 360
    },
    {
      "epoch": 1.6186252771618626,
      "grad_norm": 0.35475605726242065,
      "learning_rate": 0.0001709298470760101,
      "loss": 1.3733,
      "step": 365
    },
    {
      "epoch": 1.6407982261640797,
      "grad_norm": 0.35535967350006104,
      "learning_rate": 0.0001698272091754264,
      "loss": 1.3879,
      "step": 370
    },
    {
      "epoch": 1.6629711751662972,
      "grad_norm": 0.38182777166366577,
      "learning_rate": 0.00016870774861380228,
      "loss": 1.3673,
      "step": 375
    },
    {
      "epoch": 1.6851441241685143,
      "grad_norm": 0.4077872037887573,
      "learning_rate": 0.00016757173508980965,
      "loss": 1.3711,
      "step": 380
    },
    {
      "epoch": 1.7073170731707317,
      "grad_norm": 0.42192327976226807,
      "learning_rate": 0.00016641944229003395,
      "loss": 1.376,
      "step": 385
    },
    {
      "epoch": 1.729490022172949,
      "grad_norm": 0.36239758133888245,
      "learning_rate": 0.00016525114782303807,
      "loss": 1.3643,
      "step": 390
    },
    {
      "epoch": 1.7516629711751663,
      "grad_norm": 0.37795644998550415,
      "learning_rate": 0.00016406713315248136,
      "loss": 1.3608,
      "step": 395
    },
    {
      "epoch": 1.7738359201773837,
      "grad_norm": 0.33361098170280457,
      "learning_rate": 0.00016286768352930973,
      "loss": 1.3693,
      "step": 400
    },
    {
      "epoch": 1.7960088691796008,
      "grad_norm": 0.36207035183906555,
      "learning_rate": 0.0001616530879230335,
      "loss": 1.3709,
      "step": 405
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.34705451130867004,
      "learning_rate": 0.00016042363895210946,
      "loss": 1.3502,
      "step": 410
    },
    {
      "epoch": 1.8403547671840355,
      "grad_norm": 0.339901864528656,
      "learning_rate": 0.00015917963281344345,
      "loss": 1.3591,
      "step": 415
    },
    {
      "epoch": 1.8625277161862528,
      "grad_norm": 0.3383360505104065,
      "learning_rate": 0.00015792136921103124,
      "loss": 1.3473,
      "step": 420
    },
    {
      "epoch": 1.8847006651884701,
      "grad_norm": 0.3463388979434967,
      "learning_rate": 0.0001566491512837543,
      "loss": 1.3723,
      "step": 425
    },
    {
      "epoch": 1.9068736141906872,
      "grad_norm": 0.36131027340888977,
      "learning_rate": 0.00015536328553234792,
      "loss": 1.3747,
      "step": 430
    },
    {
      "epoch": 1.9290465631929048,
      "grad_norm": 0.3317002058029175,
      "learning_rate": 0.00015406408174555976,
      "loss": 1.3497,
      "step": 435
    },
    {
      "epoch": 1.951219512195122,
      "grad_norm": 0.39210090041160583,
      "learning_rate": 0.00015275185292551585,
      "loss": 1.3622,
      "step": 440
    },
    {
      "epoch": 1.9733924611973392,
      "grad_norm": 0.3842204213142395,
      "learning_rate": 0.00015142691521231267,
      "loss": 1.3575,
      "step": 445
    },
    {
      "epoch": 1.9955654101995566,
      "grad_norm": 0.3843972980976105,
      "learning_rate": 0.0001500895878078532,
      "loss": 1.3594,
      "step": 450
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.77187979221344,
      "eval_runtime": 0.3372,
      "eval_samples_per_second": 2.966,
      "eval_steps_per_second": 2.966,
      "step": 451
    },
    {
      "epoch": 2.0177383592017737,
      "grad_norm": 0.3560327887535095,
      "learning_rate": 0.00014874019289894537,
      "loss": 1.3498,
      "step": 455
    },
    {
      "epoch": 2.0399113082039912,
      "grad_norm": 0.3512759804725647,
      "learning_rate": 0.00014737905557968105,
      "loss": 1.3208,
      "step": 460
    },
    {
      "epoch": 2.0620842572062084,
      "grad_norm": 0.35677239298820496,
      "learning_rate": 0.00014600650377311522,
      "loss": 1.3367,
      "step": 465
    },
    {
      "epoch": 2.084257206208426,
      "grad_norm": 0.3483263850212097,
      "learning_rate": 0.00014462286815226314,
      "loss": 1.3235,
      "step": 470
    },
    {
      "epoch": 2.106430155210643,
      "grad_norm": 0.3995297849178314,
      "learning_rate": 0.00014322848206043505,
      "loss": 1.3278,
      "step": 475
    },
    {
      "epoch": 2.12860310421286,
      "grad_norm": 0.34428730607032776,
      "learning_rate": 0.00014182368143092768,
      "loss": 1.3253,
      "step": 480
    },
    {
      "epoch": 2.1507760532150777,
      "grad_norm": 0.3784777820110321,
      "learning_rate": 0.00014040880470609187,
      "loss": 1.3062,
      "step": 485
    },
    {
      "epoch": 2.172949002217295,
      "grad_norm": 0.35997453331947327,
      "learning_rate": 0.00013898419275579522,
      "loss": 1.3055,
      "step": 490
    },
    {
      "epoch": 2.1951219512195124,
      "grad_norm": 0.37477272748947144,
      "learning_rate": 0.00013755018879530075,
      "loss": 1.3283,
      "step": 495
    },
    {
      "epoch": 2.2172949002217295,
      "grad_norm": 0.42467719316482544,
      "learning_rate": 0.00013610713830257954,
      "loss": 1.3244,
      "step": 500
    },
    {
      "epoch": 2.2394678492239466,
      "grad_norm": 0.37103238701820374,
      "learning_rate": 0.00013465538893507907,
      "loss": 1.3338,
      "step": 505
    },
    {
      "epoch": 2.261640798226164,
      "grad_norm": 0.3519739806652069,
      "learning_rate": 0.00013319529044596593,
      "loss": 1.3243,
      "step": 510
    },
    {
      "epoch": 2.2838137472283813,
      "grad_norm": 0.366401731967926,
      "learning_rate": 0.00013172719459986397,
      "loss": 1.3188,
      "step": 515
    },
    {
      "epoch": 2.305986696230599,
      "grad_norm": 0.39243802428245544,
      "learning_rate": 0.0001302514550881076,
      "loss": 1.3119,
      "step": 520
    },
    {
      "epoch": 2.328159645232816,
      "grad_norm": 0.3964441418647766,
      "learning_rate": 0.00012876842744353112,
      "loss": 1.3176,
      "step": 525
    },
    {
      "epoch": 2.3503325942350335,
      "grad_norm": 0.3520803153514862,
      "learning_rate": 0.00012727846895481434,
      "loss": 1.3171,
      "step": 530
    },
    {
      "epoch": 2.3725055432372506,
      "grad_norm": 0.35095489025115967,
      "learning_rate": 0.00012578193858040507,
      "loss": 1.3239,
      "step": 535
    },
    {
      "epoch": 2.3946784922394677,
      "grad_norm": 0.3672475516796112,
      "learning_rate": 0.0001242791968620394,
      "loss": 1.3295,
      "step": 540
    },
    {
      "epoch": 2.4168514412416853,
      "grad_norm": 0.3635219633579254,
      "learning_rate": 0.00012277060583788064,
      "loss": 1.3299,
      "step": 545
    },
    {
      "epoch": 2.4390243902439024,
      "grad_norm": 0.3647380471229553,
      "learning_rate": 0.00012125652895529766,
      "loss": 1.3255,
      "step": 550
    },
    {
      "epoch": 2.4611973392461195,
      "grad_norm": 0.40251094102859497,
      "learning_rate": 0.00011973733098330368,
      "loss": 1.324,
      "step": 555
    },
    {
      "epoch": 2.483370288248337,
      "grad_norm": 0.36837145686149597,
      "learning_rate": 0.0001182133779246766,
      "loss": 1.3061,
      "step": 560
    },
    {
      "epoch": 2.505543237250554,
      "grad_norm": 0.3746941387653351,
      "learning_rate": 0.00011668503692778239,
      "loss": 1.3239,
      "step": 565
    },
    {
      "epoch": 2.5277161862527717,
      "grad_norm": 0.3703044652938843,
      "learning_rate": 0.00011515267619812214,
      "loss": 1.3197,
      "step": 570
    },
    {
      "epoch": 2.549889135254989,
      "grad_norm": 0.3508300185203552,
      "learning_rate": 0.00011361666490962468,
      "loss": 1.3231,
      "step": 575
    },
    {
      "epoch": 2.5720620842572064,
      "grad_norm": 0.36951854825019836,
      "learning_rate": 0.00011207737311570559,
      "loss": 1.3205,
      "step": 580
    },
    {
      "epoch": 2.5942350332594235,
      "grad_norm": 0.3559792637825012,
      "learning_rate": 0.00011053517166011471,
      "loss": 1.312,
      "step": 585
    },
    {
      "epoch": 2.6164079822616406,
      "grad_norm": 0.37214240431785583,
      "learning_rate": 0.00010899043208759305,
      "loss": 1.3354,
      "step": 590
    },
    {
      "epoch": 2.638580931263858,
      "grad_norm": 0.35989540815353394,
      "learning_rate": 0.00010744352655436059,
      "loss": 1.3408,
      "step": 595
    },
    {
      "epoch": 2.6607538802660753,
      "grad_norm": 0.35892507433891296,
      "learning_rate": 0.00010589482773845727,
      "loss": 1.3268,
      "step": 600
    },
    {
      "epoch": 2.682926829268293,
      "grad_norm": 0.3559330105781555,
      "learning_rate": 0.00010434470874995781,
      "loss": 1.3102,
      "step": 605
    },
    {
      "epoch": 2.70509977827051,
      "grad_norm": 0.3716677725315094,
      "learning_rate": 0.00010279354304108271,
      "loss": 1.3015,
      "step": 610
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.3538273572921753,
      "learning_rate": 0.0001012417043162266,
      "loss": 1.3046,
      "step": 615
    },
    {
      "epoch": 2.7494456762749446,
      "grad_norm": 0.38784587383270264,
      "learning_rate": 9.968956644192617e-05,
      "loss": 1.3245,
      "step": 620
    },
    {
      "epoch": 2.7716186252771617,
      "grad_norm": 0.3511577248573303,
      "learning_rate": 9.813750335678866e-05,
      "loss": 1.3181,
      "step": 625
    },
    {
      "epoch": 2.7937915742793793,
      "grad_norm": 0.37702956795692444,
      "learning_rate": 9.658588898140322e-05,
      "loss": 1.3147,
      "step": 630
    },
    {
      "epoch": 2.8159645232815964,
      "grad_norm": 0.3686079680919647,
      "learning_rate": 9.503509712825658e-05,
      "loss": 1.311,
      "step": 635
    },
    {
      "epoch": 2.8381374722838135,
      "grad_norm": 0.3567045331001282,
      "learning_rate": 9.348550141167472e-05,
      "loss": 1.2965,
      "step": 640
    },
    {
      "epoch": 2.860310421286031,
      "grad_norm": 0.3552383780479431,
      "learning_rate": 9.193747515781224e-05,
      "loss": 1.3121,
      "step": 645
    },
    {
      "epoch": 2.882483370288248,
      "grad_norm": 0.3610834777355194,
      "learning_rate": 9.039139131471128e-05,
      "loss": 1.3136,
      "step": 650
    },
    {
      "epoch": 2.9046563192904657,
      "grad_norm": 0.36015474796295166,
      "learning_rate": 8.884762236245145e-05,
      "loss": 1.3131,
      "step": 655
    },
    {
      "epoch": 2.926829268292683,
      "grad_norm": 0.3582945764064789,
      "learning_rate": 8.730654022341256e-05,
      "loss": 1.3237,
      "step": 660
    },
    {
      "epoch": 2.9490022172949004,
      "grad_norm": 0.34879669547080994,
      "learning_rate": 8.57685161726715e-05,
      "loss": 1.3295,
      "step": 665
    },
    {
      "epoch": 2.9711751662971175,
      "grad_norm": 0.3675631582736969,
      "learning_rate": 8.423392074855545e-05,
      "loss": 1.3066,
      "step": 670
    },
    {
      "epoch": 2.9933481152993346,
      "grad_norm": 0.34397903084754944,
      "learning_rate": 8.270312366337226e-05,
      "loss": 1.3154,
      "step": 675
    },
    {
      "epoch": 2.9977827050997785,
      "eval_loss": 1.7679176330566406,
      "eval_runtime": 0.3532,
      "eval_samples_per_second": 2.831,
      "eval_steps_per_second": 2.831,
      "step": 676
    },
    {
      "epoch": 3.015521064301552,
      "grad_norm": 0.38900941610336304,
      "learning_rate": 8.117649371433994e-05,
      "loss": 1.2977,
      "step": 680
    },
    {
      "epoch": 3.0376940133037693,
      "grad_norm": 0.3746815025806427,
      "learning_rate": 7.965439869473664e-05,
      "loss": 1.2848,
      "step": 685
    },
    {
      "epoch": 3.059866962305987,
      "grad_norm": 0.3739188313484192,
      "learning_rate": 7.813720530529243e-05,
      "loss": 1.29,
      "step": 690
    },
    {
      "epoch": 3.082039911308204,
      "grad_norm": 0.3820560574531555,
      "learning_rate": 7.66252790658445e-05,
      "loss": 1.2729,
      "step": 695
    },
    {
      "epoch": 3.104212860310421,
      "grad_norm": 0.41729655861854553,
      "learning_rate": 7.511898422727642e-05,
      "loss": 1.2701,
      "step": 700
    },
    {
      "epoch": 3.1263858093126387,
      "grad_norm": 0.3661589026451111,
      "learning_rate": 7.361868368376364e-05,
      "loss": 1.279,
      "step": 705
    },
    {
      "epoch": 3.1485587583148558,
      "grad_norm": 0.3968789875507355,
      "learning_rate": 7.212473888534546e-05,
      "loss": 1.2863,
      "step": 710
    },
    {
      "epoch": 3.1707317073170733,
      "grad_norm": 0.3675246238708496,
      "learning_rate": 7.063750975084518e-05,
      "loss": 1.2811,
      "step": 715
    },
    {
      "epoch": 3.1929046563192904,
      "grad_norm": 0.39409342408180237,
      "learning_rate": 6.915735458115884e-05,
      "loss": 1.2765,
      "step": 720
    },
    {
      "epoch": 3.2150776053215075,
      "grad_norm": 0.3648887574672699,
      "learning_rate": 6.768462997293413e-05,
      "loss": 1.2882,
      "step": 725
    },
    {
      "epoch": 3.237250554323725,
      "grad_norm": 0.3808637857437134,
      "learning_rate": 6.62196907326595e-05,
      "loss": 1.2973,
      "step": 730
    },
    {
      "epoch": 3.259423503325942,
      "grad_norm": 0.388408362865448,
      "learning_rate": 6.476288979118496e-05,
      "loss": 1.3099,
      "step": 735
    },
    {
      "epoch": 3.2815964523281598,
      "grad_norm": 0.3849842846393585,
      "learning_rate": 6.331457811869437e-05,
      "loss": 1.2919,
      "step": 740
    },
    {
      "epoch": 3.303769401330377,
      "grad_norm": 0.3851810693740845,
      "learning_rate": 6.187510464015022e-05,
      "loss": 1.2793,
      "step": 745
    },
    {
      "epoch": 3.3259423503325944,
      "grad_norm": 0.3811032474040985,
      "learning_rate": 6.0444816151231375e-05,
      "loss": 1.2803,
      "step": 750
    },
    {
      "epoch": 3.3481152993348116,
      "grad_norm": 0.36981481313705444,
      "learning_rate": 5.902405723478346e-05,
      "loss": 1.2612,
      "step": 755
    },
    {
      "epoch": 3.3702882483370287,
      "grad_norm": 0.37967202067375183,
      "learning_rate": 5.76131701778025e-05,
      "loss": 1.2924,
      "step": 760
    },
    {
      "epoch": 3.3924611973392462,
      "grad_norm": 0.39395394921302795,
      "learning_rate": 5.621249488897176e-05,
      "loss": 1.2714,
      "step": 765
    },
    {
      "epoch": 3.4146341463414633,
      "grad_norm": 0.38406023383140564,
      "learning_rate": 5.4822368816771406e-05,
      "loss": 1.2885,
      "step": 770
    },
    {
      "epoch": 3.436807095343681,
      "grad_norm": 0.372646301984787,
      "learning_rate": 5.344312686818106e-05,
      "loss": 1.2791,
      "step": 775
    },
    {
      "epoch": 3.458980044345898,
      "grad_norm": 0.3789440393447876,
      "learning_rate": 5.207510132799436e-05,
      "loss": 1.2918,
      "step": 780
    },
    {
      "epoch": 3.481152993348115,
      "grad_norm": 0.3723030388355255,
      "learning_rate": 5.0718621778765476e-05,
      "loss": 1.2772,
      "step": 785
    },
    {
      "epoch": 3.5033259423503327,
      "grad_norm": 0.373038649559021,
      "learning_rate": 4.9374015021406914e-05,
      "loss": 1.2677,
      "step": 790
    },
    {
      "epoch": 3.52549889135255,
      "grad_norm": 0.3948681354522705,
      "learning_rate": 4.804160499645667e-05,
      "loss": 1.2791,
      "step": 795
    },
    {
      "epoch": 3.5476718403547673,
      "grad_norm": 0.3771447241306305,
      "learning_rate": 4.6721712706035236e-05,
      "loss": 1.2895,
      "step": 800
    },
    {
      "epoch": 3.5698447893569845,
      "grad_norm": 0.37859615683555603,
      "learning_rate": 4.5414656136510334e-05,
      "loss": 1.2841,
      "step": 805
    },
    {
      "epoch": 3.5920177383592016,
      "grad_norm": 0.3813944160938263,
      "learning_rate": 4.412075018188805e-05,
      "loss": 1.2785,
      "step": 810
    },
    {
      "epoch": 3.614190687361419,
      "grad_norm": 0.37438470125198364,
      "learning_rate": 4.2840306567949076e-05,
      "loss": 1.2803,
      "step": 815
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.36801373958587646,
      "learning_rate": 4.157363377714819e-05,
      "loss": 1.3009,
      "step": 820
    },
    {
      "epoch": 3.658536585365854,
      "grad_norm": 0.37997427582740784,
      "learning_rate": 4.0321036974295156e-05,
      "loss": 1.2833,
      "step": 825
    },
    {
      "epoch": 3.680709534368071,
      "grad_norm": 0.36499011516571045,
      "learning_rate": 3.9082817933035134e-05,
      "loss": 1.2836,
      "step": 830
    },
    {
      "epoch": 3.7028824833702885,
      "grad_norm": 0.3749338984489441,
      "learning_rate": 3.785927496314543e-05,
      "loss": 1.2869,
      "step": 835
    },
    {
      "epoch": 3.7250554323725056,
      "grad_norm": 0.3751271665096283,
      "learning_rate": 3.6650702838667464e-05,
      "loss": 1.2732,
      "step": 840
    },
    {
      "epoch": 3.7472283813747227,
      "grad_norm": 0.3683511018753052,
      "learning_rate": 3.5457392726890236e-05,
      "loss": 1.2787,
      "step": 845
    },
    {
      "epoch": 3.7694013303769403,
      "grad_norm": 0.38828131556510925,
      "learning_rate": 3.427963211820274e-05,
      "loss": 1.2811,
      "step": 850
    },
    {
      "epoch": 3.7915742793791574,
      "grad_norm": 0.37317919731140137,
      "learning_rate": 3.3117704756832226e-05,
      "loss": 1.289,
      "step": 855
    },
    {
      "epoch": 3.8137472283813745,
      "grad_norm": 0.3718849718570709,
      "learning_rate": 3.197189057248491e-05,
      "loss": 1.2764,
      "step": 860
    },
    {
      "epoch": 3.835920177383592,
      "grad_norm": 0.3691292107105255,
      "learning_rate": 3.0842465612905837e-05,
      "loss": 1.2862,
      "step": 865
    },
    {
      "epoch": 3.858093126385809,
      "grad_norm": 0.3802024722099304,
      "learning_rate": 2.9729701977374035e-05,
      "loss": 1.2838,
      "step": 870
    },
    {
      "epoch": 3.8802660753880267,
      "grad_norm": 0.38017788529396057,
      "learning_rate": 2.863386775114848e-05,
      "loss": 1.2934,
      "step": 875
    },
    {
      "epoch": 3.902439024390244,
      "grad_norm": 0.38141724467277527,
      "learning_rate": 2.7555226940881583e-05,
      "loss": 1.2829,
      "step": 880
    },
    {
      "epoch": 3.9246119733924614,
      "grad_norm": 0.38505685329437256,
      "learning_rate": 2.6494039411015193e-05,
      "loss": 1.3002,
      "step": 885
    },
    {
      "epoch": 3.9467849223946785,
      "grad_norm": 0.3752408027648926,
      "learning_rate": 2.545056082117433e-05,
      "loss": 1.2971,
      "step": 890
    },
    {
      "epoch": 3.9689578713968956,
      "grad_norm": 0.3711921274662018,
      "learning_rate": 2.4425042564574184e-05,
      "loss": 1.282,
      "step": 895
    },
    {
      "epoch": 3.991130820399113,
      "grad_norm": 0.37682175636291504,
      "learning_rate": 2.3417731707454737e-05,
      "loss": 1.268,
      "step": 900
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.796044945716858,
      "eval_runtime": 0.3372,
      "eval_samples_per_second": 2.965,
      "eval_steps_per_second": 2.965,
      "step": 902
    },
    {
      "epoch": 4.013303769401331,
      "grad_norm": 0.37139764428138733,
      "learning_rate": 2.242887092955801e-05,
      "loss": 1.2666,
      "step": 905
    },
    {
      "epoch": 4.035476718403547,
      "grad_norm": 0.3784310221672058,
      "learning_rate": 2.1458698465662187e-05,
      "loss": 1.2518,
      "step": 910
    },
    {
      "epoch": 4.057649667405765,
      "grad_norm": 0.3832833766937256,
      "learning_rate": 2.0507448048186208e-05,
      "loss": 1.2522,
      "step": 915
    },
    {
      "epoch": 4.0798226164079825,
      "grad_norm": 0.38216501474380493,
      "learning_rate": 1.957534885087944e-05,
      "loss": 1.2616,
      "step": 920
    },
    {
      "epoch": 4.101995565410199,
      "grad_norm": 0.37775281071662903,
      "learning_rate": 1.866262543360958e-05,
      "loss": 1.2696,
      "step": 925
    },
    {
      "epoch": 4.124168514412417,
      "grad_norm": 0.3922324478626251,
      "learning_rate": 1.7769497688261973e-05,
      "loss": 1.2572,
      "step": 930
    },
    {
      "epoch": 4.146341463414634,
      "grad_norm": 0.39753395318984985,
      "learning_rate": 1.6896180785763593e-05,
      "loss": 1.2574,
      "step": 935
    },
    {
      "epoch": 4.168514412416852,
      "grad_norm": 0.39192479848861694,
      "learning_rate": 1.604288512424439e-05,
      "loss": 1.2624,
      "step": 940
    },
    {
      "epoch": 4.1906873614190685,
      "grad_norm": 0.3945992588996887,
      "learning_rate": 1.520981627834851e-05,
      "loss": 1.2756,
      "step": 945
    },
    {
      "epoch": 4.212860310421286,
      "grad_norm": 0.37924352288246155,
      "learning_rate": 1.4397174949707725e-05,
      "loss": 1.257,
      "step": 950
    },
    {
      "epoch": 4.235033259423504,
      "grad_norm": 0.373811274766922,
      "learning_rate": 1.3605156918588469e-05,
      "loss": 1.2703,
      "step": 955
    },
    {
      "epoch": 4.25720620842572,
      "grad_norm": 0.3824734389781952,
      "learning_rate": 1.2833952996724863e-05,
      "loss": 1.2635,
      "step": 960
    },
    {
      "epoch": 4.279379157427938,
      "grad_norm": 0.3803390562534332,
      "learning_rate": 1.208374898134883e-05,
      "loss": 1.2715,
      "step": 965
    },
    {
      "epoch": 4.301552106430155,
      "grad_norm": 0.38187548518180847,
      "learning_rate": 1.1354725610427807e-05,
      "loss": 1.2775,
      "step": 970
    },
    {
      "epoch": 4.323725055432373,
      "grad_norm": 0.3768659234046936,
      "learning_rate": 1.0647058519121821e-05,
      "loss": 1.2394,
      "step": 975
    },
    {
      "epoch": 4.34589800443459,
      "grad_norm": 0.3807064890861511,
      "learning_rate": 9.960918197469771e-06,
      "loss": 1.2673,
      "step": 980
    },
    {
      "epoch": 4.368070953436807,
      "grad_norm": 0.38065963983535767,
      "learning_rate": 9.296469949315156e-06,
      "loss": 1.2713,
      "step": 985
    },
    {
      "epoch": 4.390243902439025,
      "grad_norm": 0.3850005865097046,
      "learning_rate": 8.653873852481364e-06,
      "loss": 1.2468,
      "step": 990
    },
    {
      "epoch": 4.412416851441241,
      "grad_norm": 0.3868677020072937,
      "learning_rate": 8.033284720205946e-06,
      "loss": 1.2654,
      "step": 995
    },
    {
      "epoch": 4.434589800443459,
      "grad_norm": 0.37283623218536377,
      "learning_rate": 7.434852063843278e-06,
      "loss": 1.2616,
      "step": 1000
    },
    {
      "epoch": 4.4567627494456765,
      "grad_norm": 0.3784988522529602,
      "learning_rate": 6.858720056844614e-06,
      "loss": 1.2602,
      "step": 1005
    },
    {
      "epoch": 4.478935698447893,
      "grad_norm": 0.389172226190567,
      "learning_rate": 6.3050275000238414e-06,
      "loss": 1.2671,
      "step": 1010
    },
    {
      "epoch": 4.501108647450111,
      "grad_norm": 0.37874796986579895,
      "learning_rate": 5.77390778811796e-06,
      "loss": 1.2743,
      "step": 1015
    },
    {
      "epoch": 4.523281596452328,
      "grad_norm": 0.39037781953811646,
      "learning_rate": 5.265488877649816e-06,
      "loss": 1.2684,
      "step": 1020
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.3832471966743469,
      "learning_rate": 4.7798932561009865e-06,
      "loss": 1.2674,
      "step": 1025
    },
    {
      "epoch": 4.5676274944567625,
      "grad_norm": 0.3740783631801605,
      "learning_rate": 4.317237912402316e-06,
      "loss": 1.2686,
      "step": 1030
    },
    {
      "epoch": 4.58980044345898,
      "grad_norm": 0.38245856761932373,
      "learning_rate": 3.877634308749078e-06,
      "loss": 1.2457,
      "step": 1035
    },
    {
      "epoch": 4.611973392461198,
      "grad_norm": 0.38514626026153564,
      "learning_rate": 3.461188353747702e-06,
      "loss": 1.2631,
      "step": 1040
    },
    {
      "epoch": 4.634146341463414,
      "grad_norm": 0.38133639097213745,
      "learning_rate": 3.068000376900515e-06,
      "loss": 1.2775,
      "step": 1045
    },
    {
      "epoch": 4.656319290465632,
      "grad_norm": 0.3845633268356323,
      "learning_rate": 2.6981651044344024e-06,
      "loss": 1.2642,
      "step": 1050
    },
    {
      "epoch": 4.678492239467849,
      "grad_norm": 0.38162142038345337,
      "learning_rate": 2.3517716364795385e-06,
      "loss": 1.2692,
      "step": 1055
    },
    {
      "epoch": 4.700665188470067,
      "grad_norm": 0.392133504152298,
      "learning_rate": 2.028903425603612e-06,
      "loss": 1.2628,
      "step": 1060
    },
    {
      "epoch": 4.722838137472284,
      "grad_norm": 0.374054878950119,
      "learning_rate": 1.7296382567064672e-06,
      "loss": 1.2611,
      "step": 1065
    },
    {
      "epoch": 4.745011086474501,
      "grad_norm": 0.38326382637023926,
      "learning_rate": 1.4540482282803137e-06,
      "loss": 1.2453,
      "step": 1070
    },
    {
      "epoch": 4.767184035476719,
      "grad_norm": 0.39525189995765686,
      "learning_rate": 1.2021997350399106e-06,
      "loss": 1.2849,
      "step": 1075
    },
    {
      "epoch": 4.789356984478935,
      "grad_norm": 0.38262608647346497,
      "learning_rate": 9.741534519267736e-07,
      "loss": 1.2696,
      "step": 1080
    },
    {
      "epoch": 4.811529933481153,
      "grad_norm": 0.38532087206840515,
      "learning_rate": 7.699643194915784e-07,
      "loss": 1.26,
      "step": 1085
    },
    {
      "epoch": 4.8337028824833705,
      "grad_norm": 0.3865302503108978,
      "learning_rate": 5.896815306578818e-07,
      "loss": 1.2568,
      "step": 1090
    },
    {
      "epoch": 4.855875831485587,
      "grad_norm": 0.3837885856628418,
      "learning_rate": 4.333485188706576e-07,
      "loss": 1.2644,
      "step": 1095
    },
    {
      "epoch": 4.878048780487805,
      "grad_norm": 0.37372496724128723,
      "learning_rate": 3.0100294763238946e-07,
      "loss": 1.2764,
      "step": 1100
    },
    {
      "epoch": 4.900221729490022,
      "grad_norm": 0.38477322459220886,
      "learning_rate": 1.9267670142926187e-07,
      "loss": 1.2529,
      "step": 1105
    },
    {
      "epoch": 4.922394678492239,
      "grad_norm": 0.3782412111759186,
      "learning_rate": 1.0839587804954975e-07,
      "loss": 1.2552,
      "step": 1110
    },
    {
      "epoch": 4.9445676274944566,
      "grad_norm": 0.3779396116733551,
      "learning_rate": 4.818078229622547e-08,
      "loss": 1.2773,
      "step": 1115
    },
    {
      "epoch": 4.966740576496674,
      "grad_norm": 0.38919439911842346,
      "learning_rate": 1.2045921095127366e-08,
      "loss": 1.2647,
      "step": 1120
    },
    {
      "epoch": 4.988913525498892,
      "grad_norm": 0.38752755522727966,
      "learning_rate": 0.0,
      "loss": 1.2695,
      "step": 1125
    },
    {
      "epoch": 4.988913525498892,
      "eval_loss": 1.8087279796600342,
      "eval_runtime": 0.3391,
      "eval_samples_per_second": 2.949,
      "eval_steps_per_second": 2.949,
      "step": 1125
    },
    {
      "epoch": 4.988913525498892,
      "step": 1125,
      "total_flos": 1.6629843858229821e+18,
      "train_loss": 1.390608725865682,
      "train_runtime": 3610.9267,
      "train_samples_per_second": 9.977,
      "train_steps_per_second": 0.312
    }
  ],
  "logging_steps": 5,
  "max_steps": 1125,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6629843858229821e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}