diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,4986 @@ +{ + "best_metric": 0.22935771942138672, + "best_model_checkpoint": "output/output__lora/checkpoint-600", + "epoch": 1.7010935601458081, + "eval_steps": 100, + "global_step": 700, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.002430133657351154, + "grad_norm": 2.0297858715057373, + "learning_rate": 0.0, + "loss": 0.3878, + "step": 1 + }, + { + "epoch": 0.004860267314702308, + "grad_norm": 2.0333988666534424, + "learning_rate": 8.859191006777897e-06, + "loss": 0.4074, + "step": 2 + }, + { + "epoch": 0.007290400972053463, + "grad_norm": 2.1134026050567627, + "learning_rate": 1.4041485532469073e-05, + "loss": 0.3097, + "step": 3 + }, + { + "epoch": 0.009720534629404616, + "grad_norm": 2.1784684658050537, + "learning_rate": 1.7718382013555794e-05, + "loss": 0.4578, + "step": 4 + }, + { + "epoch": 0.012150668286755772, + "grad_norm": 1.0290217399597168, + "learning_rate": 2.0570404496611053e-05, + "loss": 0.3389, + "step": 5 + }, + { + "epoch": 0.014580801944106925, + "grad_norm": 3.486976385116577, + "learning_rate": 2.2900676539246968e-05, + "loss": 0.2617, + "step": 6 + }, + { + "epoch": 0.01701093560145808, + "grad_norm": 1.7177977561950684, + "learning_rate": 2.4870893478326387e-05, + "loss": 0.5574, + "step": 7 + }, + { + "epoch": 0.019441069258809233, + "grad_norm": 0.8118329048156738, + "learning_rate": 2.6577573020333684e-05, + "loss": 0.4099, + "step": 8 + }, + { + "epoch": 0.02187120291616039, + "grad_norm": 2.2652881145477295, + "learning_rate": 2.8082971064938146e-05, + "loss": 0.3207, + "step": 9 + }, + { + "epoch": 0.024301336573511544, + "grad_norm": 1.2344919443130493, + "learning_rate": 2.9429595503388953e-05, + "loss": 0.2961, + "step": 10 + }, + { + "epoch": 0.026731470230862697, + "grad_norm": 1.2169722318649292, + "learning_rate": 3.064776548439465e-05, + "loss": 0.3689, + "step": 11 + }, + { + "epoch": 0.02916160388821385, + "grad_norm": 1.162546992301941, + "learning_rate": 3.1759867546024865e-05, + "loss": 0.351, + "step": 12 + }, + { + "epoch": 0.031591737545565005, + "grad_norm": 2.5353000164031982, + "learning_rate": 3.2782902272079295e-05, + "loss": 0.2424, + "step": 13 + }, + { + "epoch": 0.03402187120291616, + "grad_norm": 1.6772927045822144, + "learning_rate": 3.373008448510428e-05, + "loss": 0.2366, + "step": 14 + }, + { + "epoch": 0.03645200486026731, + "grad_norm": 1.5499287843704224, + "learning_rate": 3.4611890029080124e-05, + "loss": 0.3358, + "step": 15 + }, + { + "epoch": 0.038882138517618466, + "grad_norm": 1.383988618850708, + "learning_rate": 3.543676402711159e-05, + "loss": 0.3495, + "step": 16 + }, + { + "epoch": 0.041312272174969626, + "grad_norm": 1.4687221050262451, + "learning_rate": 3.621161404374383e-05, + "loss": 0.3973, + "step": 17 + }, + { + "epoch": 0.04374240583232078, + "grad_norm": 1.1256321668624878, + "learning_rate": 3.694216207171603e-05, + "loss": 0.2556, + "step": 18 + }, + { + "epoch": 0.046172539489671933, + "grad_norm": 0.9001079797744751, + "learning_rate": 3.76332012245438e-05, + "loss": 0.2283, + "step": 19 + }, + { + "epoch": 0.04860267314702309, + "grad_norm": 2.164158582687378, + "learning_rate": 3.8288786510166846e-05, + "loss": 0.2065, + "step": 20 + }, + { + "epoch": 0.05103280680437424, + "grad_norm": 0.6704838871955872, + "learning_rate": 3.8912379010795455e-05, + "loss": 0.3622, + "step": 21 + 
}, + { + "epoch": 0.053462940461725394, + "grad_norm": 1.380202293395996, + "learning_rate": 3.9506956491172545e-05, + "loss": 0.2949, + "step": 22 + }, + { + "epoch": 0.05589307411907655, + "grad_norm": 1.281702995300293, + "learning_rate": 4.007509939970292e-05, + "loss": 0.1766, + "step": 23 + }, + { + "epoch": 0.0583232077764277, + "grad_norm": 1.581787347793579, + "learning_rate": 4.061905855280276e-05, + "loss": 0.2676, + "step": 24 + }, + { + "epoch": 0.060753341433778855, + "grad_norm": 1.8878554105758667, + "learning_rate": 4.1140808993222106e-05, + "loss": 0.2572, + "step": 25 + }, + { + "epoch": 0.06318347509113001, + "grad_norm": 0.6384598612785339, + "learning_rate": 4.164209327885719e-05, + "loss": 0.231, + "step": 26 + }, + { + "epoch": 0.06561360874848117, + "grad_norm": 0.9673507809638977, + "learning_rate": 4.2124456597407214e-05, + "loss": 0.1772, + "step": 27 + }, + { + "epoch": 0.06804374240583232, + "grad_norm": 1.0678174495697021, + "learning_rate": 4.258927549188218e-05, + "loss": 0.2025, + "step": 28 + }, + { + "epoch": 0.07047387606318348, + "grad_norm": 0.9189561009407043, + "learning_rate": 4.303778154313212e-05, + "loss": 0.2345, + "step": 29 + }, + { + "epoch": 0.07290400972053462, + "grad_norm": 1.3609440326690674, + "learning_rate": 4.347108103585803e-05, + "loss": 0.2201, + "step": 30 + }, + { + "epoch": 0.07533414337788578, + "grad_norm": 1.3813296556472778, + "learning_rate": 4.389017139879164e-05, + "loss": 0.2438, + "step": 31 + }, + { + "epoch": 0.07776427703523693, + "grad_norm": 1.1185611486434937, + "learning_rate": 4.429595503388948e-05, + "loss": 0.1972, + "step": 32 + }, + { + "epoch": 0.08019441069258809, + "grad_norm": 0.876745343208313, + "learning_rate": 4.468925101686371e-05, + "loss": 0.2754, + "step": 33 + }, + { + "epoch": 0.08262454434993925, + "grad_norm": 0.7372068762779236, + "learning_rate": 4.507080505052173e-05, + "loss": 0.2719, + "step": 34 + }, + { + "epoch": 0.0850546780072904, + "grad_norm": 1.4229792356491089, + "learning_rate": 4.544129797493744e-05, + "loss": 0.3219, + "step": 35 + }, + { + "epoch": 0.08748481166464156, + "grad_norm": 1.4566582441329956, + "learning_rate": 4.5801353078493936e-05, + "loss": 0.1257, + "step": 36 + }, + { + "epoch": 0.0899149453219927, + "grad_norm": 1.217944860458374, + "learning_rate": 4.615154240700883e-05, + "loss": 0.3138, + "step": 37 + }, + { + "epoch": 0.09234507897934387, + "grad_norm": 1.882925033569336, + "learning_rate": 4.6492392231321696e-05, + "loss": 0.219, + "step": 38 + }, + { + "epoch": 0.09477521263669501, + "grad_norm": 0.8372556567192078, + "learning_rate": 4.682438780454837e-05, + "loss": 0.27, + "step": 39 + }, + { + "epoch": 0.09720534629404617, + "grad_norm": 0.9709153771400452, + "learning_rate": 4.714797751694474e-05, + "loss": 0.2759, + "step": 40 + }, + { + "epoch": 0.09963547995139732, + "grad_norm": 1.1802537441253662, + "learning_rate": 4.7463576537657414e-05, + "loss": 0.2411, + "step": 41 + }, + { + "epoch": 0.10206561360874848, + "grad_norm": 0.7289960980415344, + "learning_rate": 4.777157001757336e-05, + "loss": 0.2755, + "step": 42 + }, + { + "epoch": 0.10449574726609964, + "grad_norm": 0.8493523001670837, + "learning_rate": 4.8072315915252694e-05, + "loss": 0.3177, + "step": 43 + }, + { + "epoch": 0.10692588092345079, + "grad_norm": 1.0613563060760498, + "learning_rate": 4.8366147497950435e-05, + "loss": 0.2056, + "step": 44 + }, + { + "epoch": 0.10935601458080195, + "grad_norm": 1.1137558221817017, + "learning_rate": 4.8653375561549195e-05, + "loss": 
0.293, + "step": 45 + }, + { + "epoch": 0.1117861482381531, + "grad_norm": 0.6552504301071167, + "learning_rate": 4.8934290406480814e-05, + "loss": 0.2478, + "step": 46 + }, + { + "epoch": 0.11421628189550426, + "grad_norm": 0.9905387759208679, + "learning_rate": 4.920916360113129e-05, + "loss": 0.2488, + "step": 47 + }, + { + "epoch": 0.1166464155528554, + "grad_norm": 1.101691484451294, + "learning_rate": 4.947824955958066e-05, + "loss": 0.1583, + "step": 48 + }, + { + "epoch": 0.11907654921020656, + "grad_norm": 1.3675787448883057, + "learning_rate": 4.9741786956652774e-05, + "loss": 0.3629, + "step": 49 + }, + { + "epoch": 0.12150668286755771, + "grad_norm": 2.6356711387634277, + "learning_rate": 5e-05, + "loss": 0.285, + "step": 50 + }, + { + "epoch": 0.12393681652490887, + "grad_norm": 1.5550183057785034, + "learning_rate": 5e-05, + "loss": 0.1688, + "step": 51 + }, + { + "epoch": 0.12636695018226002, + "grad_norm": 1.7116848230361938, + "learning_rate": 5e-05, + "loss": 0.2495, + "step": 52 + }, + { + "epoch": 0.12879708383961117, + "grad_norm": 1.3964793682098389, + "learning_rate": 5e-05, + "loss": 0.276, + "step": 53 + }, + { + "epoch": 0.13122721749696234, + "grad_norm": 0.8001569509506226, + "learning_rate": 5e-05, + "loss": 0.2976, + "step": 54 + }, + { + "epoch": 0.1336573511543135, + "grad_norm": 1.331112027168274, + "learning_rate": 5e-05, + "loss": 0.2725, + "step": 55 + }, + { + "epoch": 0.13608748481166463, + "grad_norm": 1.0783005952835083, + "learning_rate": 5e-05, + "loss": 0.188, + "step": 56 + }, + { + "epoch": 0.1385176184690158, + "grad_norm": 0.9507465958595276, + "learning_rate": 5e-05, + "loss": 0.2917, + "step": 57 + }, + { + "epoch": 0.14094775212636695, + "grad_norm": 0.8285686373710632, + "learning_rate": 5e-05, + "loss": 0.3322, + "step": 58 + }, + { + "epoch": 0.1433778857837181, + "grad_norm": 0.8201387524604797, + "learning_rate": 5e-05, + "loss": 0.2296, + "step": 59 + }, + { + "epoch": 0.14580801944106925, + "grad_norm": 0.743626594543457, + "learning_rate": 5e-05, + "loss": 0.1785, + "step": 60 + }, + { + "epoch": 0.14823815309842042, + "grad_norm": 0.9876251816749573, + "learning_rate": 5e-05, + "loss": 0.3416, + "step": 61 + }, + { + "epoch": 0.15066828675577157, + "grad_norm": 1.6088169813156128, + "learning_rate": 5e-05, + "loss": 0.2513, + "step": 62 + }, + { + "epoch": 0.15309842041312272, + "grad_norm": 1.2406659126281738, + "learning_rate": 5e-05, + "loss": 0.2856, + "step": 63 + }, + { + "epoch": 0.15552855407047386, + "grad_norm": 1.0257083177566528, + "learning_rate": 5e-05, + "loss": 0.1392, + "step": 64 + }, + { + "epoch": 0.15795868772782504, + "grad_norm": 0.7829582691192627, + "learning_rate": 5e-05, + "loss": 0.248, + "step": 65 + }, + { + "epoch": 0.16038882138517618, + "grad_norm": 0.7451558113098145, + "learning_rate": 5e-05, + "loss": 0.2826, + "step": 66 + }, + { + "epoch": 0.16281895504252733, + "grad_norm": 0.8115066885948181, + "learning_rate": 5e-05, + "loss": 0.3322, + "step": 67 + }, + { + "epoch": 0.1652490886998785, + "grad_norm": 0.8748694658279419, + "learning_rate": 5e-05, + "loss": 0.2057, + "step": 68 + }, + { + "epoch": 0.16767922235722965, + "grad_norm": 0.6844836473464966, + "learning_rate": 5e-05, + "loss": 0.2062, + "step": 69 + }, + { + "epoch": 0.1701093560145808, + "grad_norm": 0.9419387578964233, + "learning_rate": 5e-05, + "loss": 0.169, + "step": 70 + }, + { + "epoch": 0.17253948967193194, + "grad_norm": 0.8529036045074463, + "learning_rate": 5e-05, + "loss": 0.28, + "step": 71 + }, + { + "epoch": 
0.17496962332928312, + "grad_norm": 0.8096457719802856, + "learning_rate": 5e-05, + "loss": 0.1699, + "step": 72 + }, + { + "epoch": 0.17739975698663427, + "grad_norm": 0.7595834136009216, + "learning_rate": 5e-05, + "loss": 0.2321, + "step": 73 + }, + { + "epoch": 0.1798298906439854, + "grad_norm": 0.6918485164642334, + "learning_rate": 5e-05, + "loss": 0.12, + "step": 74 + }, + { + "epoch": 0.1822600243013366, + "grad_norm": 0.8465882539749146, + "learning_rate": 5e-05, + "loss": 0.2344, + "step": 75 + }, + { + "epoch": 0.18469015795868773, + "grad_norm": 1.27568519115448, + "learning_rate": 5e-05, + "loss": 0.3018, + "step": 76 + }, + { + "epoch": 0.18712029161603888, + "grad_norm": 0.907692015171051, + "learning_rate": 5e-05, + "loss": 0.2154, + "step": 77 + }, + { + "epoch": 0.18955042527339003, + "grad_norm": 0.9362420439720154, + "learning_rate": 5e-05, + "loss": 0.2024, + "step": 78 + }, + { + "epoch": 0.1919805589307412, + "grad_norm": 1.1268069744110107, + "learning_rate": 5e-05, + "loss": 0.1509, + "step": 79 + }, + { + "epoch": 0.19441069258809235, + "grad_norm": 2.944066047668457, + "learning_rate": 5e-05, + "loss": 0.1669, + "step": 80 + }, + { + "epoch": 0.1968408262454435, + "grad_norm": 0.8719034790992737, + "learning_rate": 5e-05, + "loss": 0.2229, + "step": 81 + }, + { + "epoch": 0.19927095990279464, + "grad_norm": 0.7963365912437439, + "learning_rate": 5e-05, + "loss": 0.1485, + "step": 82 + }, + { + "epoch": 0.20170109356014582, + "grad_norm": 1.1861976385116577, + "learning_rate": 5e-05, + "loss": 0.1774, + "step": 83 + }, + { + "epoch": 0.20413122721749696, + "grad_norm": 1.029300570487976, + "learning_rate": 5e-05, + "loss": 0.2762, + "step": 84 + }, + { + "epoch": 0.2065613608748481, + "grad_norm": 0.7859634757041931, + "learning_rate": 5e-05, + "loss": 0.263, + "step": 85 + }, + { + "epoch": 0.20899149453219928, + "grad_norm": 1.2088333368301392, + "learning_rate": 5e-05, + "loss": 0.2248, + "step": 86 + }, + { + "epoch": 0.21142162818955043, + "grad_norm": 0.9491986036300659, + "learning_rate": 5e-05, + "loss": 0.1388, + "step": 87 + }, + { + "epoch": 0.21385176184690158, + "grad_norm": 1.3867732286453247, + "learning_rate": 5e-05, + "loss": 0.1932, + "step": 88 + }, + { + "epoch": 0.21628189550425272, + "grad_norm": 1.2418168783187866, + "learning_rate": 5e-05, + "loss": 0.2703, + "step": 89 + }, + { + "epoch": 0.2187120291616039, + "grad_norm": 1.514169692993164, + "learning_rate": 5e-05, + "loss": 0.3437, + "step": 90 + }, + { + "epoch": 0.22114216281895505, + "grad_norm": 1.1384562253952026, + "learning_rate": 5e-05, + "loss": 0.3157, + "step": 91 + }, + { + "epoch": 0.2235722964763062, + "grad_norm": 1.0910252332687378, + "learning_rate": 5e-05, + "loss": 0.3296, + "step": 92 + }, + { + "epoch": 0.22600243013365734, + "grad_norm": 0.8576286435127258, + "learning_rate": 5e-05, + "loss": 0.2572, + "step": 93 + }, + { + "epoch": 0.2284325637910085, + "grad_norm": 1.0798954963684082, + "learning_rate": 5e-05, + "loss": 0.2669, + "step": 94 + }, + { + "epoch": 0.23086269744835966, + "grad_norm": 1.0142358541488647, + "learning_rate": 5e-05, + "loss": 0.2607, + "step": 95 + }, + { + "epoch": 0.2332928311057108, + "grad_norm": 0.8707209825515747, + "learning_rate": 5e-05, + "loss": 0.2206, + "step": 96 + }, + { + "epoch": 0.23572296476306198, + "grad_norm": 1.038823127746582, + "learning_rate": 5e-05, + "loss": 0.1648, + "step": 97 + }, + { + "epoch": 0.23815309842041313, + "grad_norm": 1.3246694803237915, + "learning_rate": 5e-05, + "loss": 0.317, + "step": 98 + 
}, + { + "epoch": 0.24058323207776428, + "grad_norm": 1.0180795192718506, + "learning_rate": 5e-05, + "loss": 0.2653, + "step": 99 + }, + { + "epoch": 0.24301336573511542, + "grad_norm": 1.803346037864685, + "learning_rate": 5e-05, + "loss": 0.1221, + "step": 100 + }, + { + "epoch": 0.24301336573511542, + "eval_loss": 0.2808912396430969, + "eval_runtime": 508.6685, + "eval_samples_per_second": 5.402, + "eval_steps_per_second": 0.676, + "step": 100 + }, + { + "epoch": 0.2454434993924666, + "grad_norm": 1.4270009994506836, + "learning_rate": 5e-05, + "loss": 0.1513, + "step": 101 + }, + { + "epoch": 0.24787363304981774, + "grad_norm": 1.5230388641357422, + "learning_rate": 5e-05, + "loss": 0.2563, + "step": 102 + }, + { + "epoch": 0.2503037667071689, + "grad_norm": 1.0142149925231934, + "learning_rate": 5e-05, + "loss": 0.2021, + "step": 103 + }, + { + "epoch": 0.25273390036452004, + "grad_norm": 1.224928617477417, + "learning_rate": 5e-05, + "loss": 0.2227, + "step": 104 + }, + { + "epoch": 0.2551640340218712, + "grad_norm": 0.8966079950332642, + "learning_rate": 5e-05, + "loss": 0.1416, + "step": 105 + }, + { + "epoch": 0.25759416767922233, + "grad_norm": 4.392244815826416, + "learning_rate": 5e-05, + "loss": 0.1297, + "step": 106 + }, + { + "epoch": 0.2600243013365735, + "grad_norm": 0.7403039932250977, + "learning_rate": 5e-05, + "loss": 0.1383, + "step": 107 + }, + { + "epoch": 0.2624544349939247, + "grad_norm": 0.7826500535011292, + "learning_rate": 5e-05, + "loss": 0.1933, + "step": 108 + }, + { + "epoch": 0.2648845686512758, + "grad_norm": 1.2112810611724854, + "learning_rate": 5e-05, + "loss": 0.2118, + "step": 109 + }, + { + "epoch": 0.267314702308627, + "grad_norm": 1.2826062440872192, + "learning_rate": 5e-05, + "loss": 0.2028, + "step": 110 + }, + { + "epoch": 0.26974483596597815, + "grad_norm": 0.9675102829933167, + "learning_rate": 5e-05, + "loss": 0.1481, + "step": 111 + }, + { + "epoch": 0.27217496962332927, + "grad_norm": 1.2551299333572388, + "learning_rate": 5e-05, + "loss": 0.2172, + "step": 112 + }, + { + "epoch": 0.27460510328068044, + "grad_norm": 1.2526607513427734, + "learning_rate": 5e-05, + "loss": 0.213, + "step": 113 + }, + { + "epoch": 0.2770352369380316, + "grad_norm": 1.102669358253479, + "learning_rate": 5e-05, + "loss": 0.2387, + "step": 114 + }, + { + "epoch": 0.27946537059538273, + "grad_norm": 1.1452562808990479, + "learning_rate": 5e-05, + "loss": 0.2224, + "step": 115 + }, + { + "epoch": 0.2818955042527339, + "grad_norm": 1.4839582443237305, + "learning_rate": 5e-05, + "loss": 0.3572, + "step": 116 + }, + { + "epoch": 0.284325637910085, + "grad_norm": 0.7019845247268677, + "learning_rate": 5e-05, + "loss": 0.1531, + "step": 117 + }, + { + "epoch": 0.2867557715674362, + "grad_norm": 1.1344879865646362, + "learning_rate": 5e-05, + "loss": 0.1234, + "step": 118 + }, + { + "epoch": 0.2891859052247874, + "grad_norm": 0.7753707766532898, + "learning_rate": 5e-05, + "loss": 0.3054, + "step": 119 + }, + { + "epoch": 0.2916160388821385, + "grad_norm": 1.2467390298843384, + "learning_rate": 5e-05, + "loss": 0.0979, + "step": 120 + }, + { + "epoch": 0.29404617253948967, + "grad_norm": 0.8829241991043091, + "learning_rate": 5e-05, + "loss": 0.1251, + "step": 121 + }, + { + "epoch": 0.29647630619684084, + "grad_norm": 0.9316434860229492, + "learning_rate": 5e-05, + "loss": 0.1723, + "step": 122 + }, + { + "epoch": 0.29890643985419196, + "grad_norm": 0.9735895395278931, + "learning_rate": 5e-05, + "loss": 0.1992, + "step": 123 + }, + { + "epoch": 
0.30133657351154314, + "grad_norm": 1.0356009006500244, + "learning_rate": 5e-05, + "loss": 0.2912, + "step": 124 + }, + { + "epoch": 0.3037667071688943, + "grad_norm": 0.8671007752418518, + "learning_rate": 5e-05, + "loss": 0.2204, + "step": 125 + }, + { + "epoch": 0.30619684082624543, + "grad_norm": 0.9986138939857483, + "learning_rate": 5e-05, + "loss": 0.1615, + "step": 126 + }, + { + "epoch": 0.3086269744835966, + "grad_norm": 0.8567970991134644, + "learning_rate": 5e-05, + "loss": 0.3127, + "step": 127 + }, + { + "epoch": 0.3110571081409477, + "grad_norm": 1.497078537940979, + "learning_rate": 5e-05, + "loss": 0.1461, + "step": 128 + }, + { + "epoch": 0.3134872417982989, + "grad_norm": 1.9854991436004639, + "learning_rate": 5e-05, + "loss": 0.1588, + "step": 129 + }, + { + "epoch": 0.3159173754556501, + "grad_norm": 0.9955502152442932, + "learning_rate": 5e-05, + "loss": 0.2703, + "step": 130 + }, + { + "epoch": 0.3183475091130012, + "grad_norm": 0.8443557620048523, + "learning_rate": 5e-05, + "loss": 0.1432, + "step": 131 + }, + { + "epoch": 0.32077764277035237, + "grad_norm": 0.8821367025375366, + "learning_rate": 5e-05, + "loss": 0.1113, + "step": 132 + }, + { + "epoch": 0.32320777642770354, + "grad_norm": 0.997931957244873, + "learning_rate": 5e-05, + "loss": 0.1363, + "step": 133 + }, + { + "epoch": 0.32563791008505466, + "grad_norm": 1.2628774642944336, + "learning_rate": 5e-05, + "loss": 0.134, + "step": 134 + }, + { + "epoch": 0.32806804374240583, + "grad_norm": 0.7694957852363586, + "learning_rate": 5e-05, + "loss": 0.1425, + "step": 135 + }, + { + "epoch": 0.330498177399757, + "grad_norm": 1.5881726741790771, + "learning_rate": 5e-05, + "loss": 0.17, + "step": 136 + }, + { + "epoch": 0.33292831105710813, + "grad_norm": 1.2904351949691772, + "learning_rate": 5e-05, + "loss": 0.1735, + "step": 137 + }, + { + "epoch": 0.3353584447144593, + "grad_norm": 1.3187663555145264, + "learning_rate": 5e-05, + "loss": 0.2035, + "step": 138 + }, + { + "epoch": 0.3377885783718105, + "grad_norm": 1.1753528118133545, + "learning_rate": 5e-05, + "loss": 0.2147, + "step": 139 + }, + { + "epoch": 0.3402187120291616, + "grad_norm": 1.6974895000457764, + "learning_rate": 5e-05, + "loss": 0.1303, + "step": 140 + }, + { + "epoch": 0.34264884568651277, + "grad_norm": 0.9945745468139648, + "learning_rate": 5e-05, + "loss": 0.1169, + "step": 141 + }, + { + "epoch": 0.3450789793438639, + "grad_norm": 1.0099682807922363, + "learning_rate": 5e-05, + "loss": 0.1446, + "step": 142 + }, + { + "epoch": 0.34750911300121506, + "grad_norm": 1.223264217376709, + "learning_rate": 5e-05, + "loss": 0.2118, + "step": 143 + }, + { + "epoch": 0.34993924665856624, + "grad_norm": 0.8575140833854675, + "learning_rate": 5e-05, + "loss": 0.1109, + "step": 144 + }, + { + "epoch": 0.35236938031591736, + "grad_norm": 1.0145286321640015, + "learning_rate": 5e-05, + "loss": 0.1664, + "step": 145 + }, + { + "epoch": 0.35479951397326853, + "grad_norm": 0.670891284942627, + "learning_rate": 5e-05, + "loss": 0.1166, + "step": 146 + }, + { + "epoch": 0.3572296476306197, + "grad_norm": 1.4332047700881958, + "learning_rate": 5e-05, + "loss": 0.1381, + "step": 147 + }, + { + "epoch": 0.3596597812879708, + "grad_norm": 1.0511322021484375, + "learning_rate": 5e-05, + "loss": 0.1279, + "step": 148 + }, + { + "epoch": 0.362089914945322, + "grad_norm": 0.751565158367157, + "learning_rate": 5e-05, + "loss": 0.1533, + "step": 149 + }, + { + "epoch": 0.3645200486026732, + "grad_norm": 1.612855076789856, + "learning_rate": 5e-05, + "loss": 
0.1633, + "step": 150 + }, + { + "epoch": 0.3669501822600243, + "grad_norm": 1.2449802160263062, + "learning_rate": 5e-05, + "loss": 0.2037, + "step": 151 + }, + { + "epoch": 0.36938031591737547, + "grad_norm": 1.0060147047042847, + "learning_rate": 5e-05, + "loss": 0.0902, + "step": 152 + }, + { + "epoch": 0.3718104495747266, + "grad_norm": 0.9183961153030396, + "learning_rate": 5e-05, + "loss": 0.1748, + "step": 153 + }, + { + "epoch": 0.37424058323207776, + "grad_norm": 1.2824058532714844, + "learning_rate": 5e-05, + "loss": 0.1903, + "step": 154 + }, + { + "epoch": 0.37667071688942894, + "grad_norm": 0.524626612663269, + "learning_rate": 5e-05, + "loss": 0.1014, + "step": 155 + }, + { + "epoch": 0.37910085054678005, + "grad_norm": 0.946535587310791, + "learning_rate": 5e-05, + "loss": 0.2512, + "step": 156 + }, + { + "epoch": 0.38153098420413123, + "grad_norm": 0.8985329270362854, + "learning_rate": 5e-05, + "loss": 0.1247, + "step": 157 + }, + { + "epoch": 0.3839611178614824, + "grad_norm": 0.6025483012199402, + "learning_rate": 5e-05, + "loss": 0.0677, + "step": 158 + }, + { + "epoch": 0.3863912515188335, + "grad_norm": 0.9210860729217529, + "learning_rate": 5e-05, + "loss": 0.1361, + "step": 159 + }, + { + "epoch": 0.3888213851761847, + "grad_norm": 1.7067134380340576, + "learning_rate": 5e-05, + "loss": 0.2008, + "step": 160 + }, + { + "epoch": 0.39125151883353587, + "grad_norm": 0.8162620663642883, + "learning_rate": 5e-05, + "loss": 0.1108, + "step": 161 + }, + { + "epoch": 0.393681652490887, + "grad_norm": 0.535029947757721, + "learning_rate": 5e-05, + "loss": 0.1578, + "step": 162 + }, + { + "epoch": 0.39611178614823817, + "grad_norm": 0.7219657897949219, + "learning_rate": 5e-05, + "loss": 0.1012, + "step": 163 + }, + { + "epoch": 0.3985419198055893, + "grad_norm": 1.0636670589447021, + "learning_rate": 5e-05, + "loss": 0.1648, + "step": 164 + }, + { + "epoch": 0.40097205346294046, + "grad_norm": 1.1116470098495483, + "learning_rate": 5e-05, + "loss": 0.1191, + "step": 165 + }, + { + "epoch": 0.40340218712029163, + "grad_norm": 0.7233144044876099, + "learning_rate": 5e-05, + "loss": 0.1206, + "step": 166 + }, + { + "epoch": 0.40583232077764275, + "grad_norm": 1.7054411172866821, + "learning_rate": 5e-05, + "loss": 0.1466, + "step": 167 + }, + { + "epoch": 0.4082624544349939, + "grad_norm": 0.9031124114990234, + "learning_rate": 5e-05, + "loss": 0.1107, + "step": 168 + }, + { + "epoch": 0.4106925880923451, + "grad_norm": 0.8140673637390137, + "learning_rate": 5e-05, + "loss": 0.1503, + "step": 169 + }, + { + "epoch": 0.4131227217496962, + "grad_norm": 1.2189490795135498, + "learning_rate": 5e-05, + "loss": 0.1081, + "step": 170 + }, + { + "epoch": 0.4155528554070474, + "grad_norm": 0.8403185606002808, + "learning_rate": 5e-05, + "loss": 0.098, + "step": 171 + }, + { + "epoch": 0.41798298906439857, + "grad_norm": 0.8939706683158875, + "learning_rate": 5e-05, + "loss": 0.0961, + "step": 172 + }, + { + "epoch": 0.4204131227217497, + "grad_norm": 0.8737717270851135, + "learning_rate": 5e-05, + "loss": 0.1165, + "step": 173 + }, + { + "epoch": 0.42284325637910086, + "grad_norm": 0.9247780442237854, + "learning_rate": 5e-05, + "loss": 0.1787, + "step": 174 + }, + { + "epoch": 0.425273390036452, + "grad_norm": 0.9532423615455627, + "learning_rate": 5e-05, + "loss": 0.1676, + "step": 175 + }, + { + "epoch": 0.42770352369380316, + "grad_norm": 0.5811410546302795, + "learning_rate": 5e-05, + "loss": 0.1221, + "step": 176 + }, + { + "epoch": 0.43013365735115433, + "grad_norm": 
0.6600822806358337, + "learning_rate": 5e-05, + "loss": 0.1809, + "step": 177 + }, + { + "epoch": 0.43256379100850545, + "grad_norm": 0.5638179183006287, + "learning_rate": 5e-05, + "loss": 0.0668, + "step": 178 + }, + { + "epoch": 0.4349939246658566, + "grad_norm": 0.5698412656784058, + "learning_rate": 5e-05, + "loss": 0.164, + "step": 179 + }, + { + "epoch": 0.4374240583232078, + "grad_norm": 1.2823748588562012, + "learning_rate": 5e-05, + "loss": 0.2111, + "step": 180 + }, + { + "epoch": 0.4398541919805589, + "grad_norm": 1.1044129133224487, + "learning_rate": 5e-05, + "loss": 0.1188, + "step": 181 + }, + { + "epoch": 0.4422843256379101, + "grad_norm": 0.9815372824668884, + "learning_rate": 5e-05, + "loss": 0.186, + "step": 182 + }, + { + "epoch": 0.44471445929526127, + "grad_norm": 0.8543856739997864, + "learning_rate": 5e-05, + "loss": 0.1237, + "step": 183 + }, + { + "epoch": 0.4471445929526124, + "grad_norm": 1.0749317407608032, + "learning_rate": 5e-05, + "loss": 0.1385, + "step": 184 + }, + { + "epoch": 0.44957472660996356, + "grad_norm": 1.4067716598510742, + "learning_rate": 5e-05, + "loss": 0.1269, + "step": 185 + }, + { + "epoch": 0.4520048602673147, + "grad_norm": 0.933139443397522, + "learning_rate": 5e-05, + "loss": 0.0476, + "step": 186 + }, + { + "epoch": 0.45443499392466585, + "grad_norm": 1.2160463333129883, + "learning_rate": 5e-05, + "loss": 0.0823, + "step": 187 + }, + { + "epoch": 0.456865127582017, + "grad_norm": 0.8305476903915405, + "learning_rate": 5e-05, + "loss": 0.2482, + "step": 188 + }, + { + "epoch": 0.45929526123936815, + "grad_norm": 0.8701503872871399, + "learning_rate": 5e-05, + "loss": 0.2007, + "step": 189 + }, + { + "epoch": 0.4617253948967193, + "grad_norm": 1.1040858030319214, + "learning_rate": 5e-05, + "loss": 0.1031, + "step": 190 + }, + { + "epoch": 0.4641555285540705, + "grad_norm": 1.149579644203186, + "learning_rate": 5e-05, + "loss": 0.1384, + "step": 191 + }, + { + "epoch": 0.4665856622114216, + "grad_norm": 1.0616861581802368, + "learning_rate": 5e-05, + "loss": 0.1961, + "step": 192 + }, + { + "epoch": 0.4690157958687728, + "grad_norm": 1.3416138887405396, + "learning_rate": 5e-05, + "loss": 0.2223, + "step": 193 + }, + { + "epoch": 0.47144592952612396, + "grad_norm": 1.2578601837158203, + "learning_rate": 5e-05, + "loss": 0.0833, + "step": 194 + }, + { + "epoch": 0.4738760631834751, + "grad_norm": 0.9381983876228333, + "learning_rate": 5e-05, + "loss": 0.135, + "step": 195 + }, + { + "epoch": 0.47630619684082626, + "grad_norm": 1.0591320991516113, + "learning_rate": 5e-05, + "loss": 0.1658, + "step": 196 + }, + { + "epoch": 0.4787363304981774, + "grad_norm": 0.9011105895042419, + "learning_rate": 5e-05, + "loss": 0.189, + "step": 197 + }, + { + "epoch": 0.48116646415552855, + "grad_norm": 0.9011105895042419, + "learning_rate": 5e-05, + "loss": 0.1729, + "step": 198 + }, + { + "epoch": 0.4835965978128797, + "grad_norm": 1.0387153625488281, + "learning_rate": 5e-05, + "loss": 0.1534, + "step": 199 + }, + { + "epoch": 0.48602673147023084, + "grad_norm": 1.7026396989822388, + "learning_rate": 5e-05, + "loss": 0.1534, + "step": 200 + }, + { + "epoch": 0.48602673147023084, + "eval_loss": 0.2694580554962158, + "eval_runtime": 510.8356, + "eval_samples_per_second": 5.379, + "eval_steps_per_second": 0.673, + "step": 200 + }, + { + "epoch": 0.488456865127582, + "grad_norm": 0.7090218663215637, + "learning_rate": 5e-05, + "loss": 0.1022, + "step": 201 + }, + { + "epoch": 0.4908869987849332, + "grad_norm": 0.6188836693763733, + "learning_rate": 
5e-05, + "loss": 0.1839, + "step": 202 + }, + { + "epoch": 0.4933171324422843, + "grad_norm": 0.8001598715782166, + "learning_rate": 5e-05, + "loss": 0.1525, + "step": 203 + }, + { + "epoch": 0.4957472660996355, + "grad_norm": 1.006425142288208, + "learning_rate": 5e-05, + "loss": 0.1417, + "step": 204 + }, + { + "epoch": 0.49817739975698666, + "grad_norm": 0.9688583612442017, + "learning_rate": 5e-05, + "loss": 0.1504, + "step": 205 + }, + { + "epoch": 0.5006075334143378, + "grad_norm": 0.9707381129264832, + "learning_rate": 5e-05, + "loss": 0.0538, + "step": 206 + }, + { + "epoch": 0.503037667071689, + "grad_norm": 0.9181280136108398, + "learning_rate": 5e-05, + "loss": 0.2319, + "step": 207 + }, + { + "epoch": 0.5054678007290401, + "grad_norm": 1.059377670288086, + "learning_rate": 5e-05, + "loss": 0.2679, + "step": 208 + }, + { + "epoch": 0.5078979343863913, + "grad_norm": 0.6514914035797119, + "learning_rate": 5e-05, + "loss": 0.0623, + "step": 209 + }, + { + "epoch": 0.5103280680437424, + "grad_norm": 0.7265043258666992, + "learning_rate": 5e-05, + "loss": 0.1034, + "step": 210 + }, + { + "epoch": 0.5127582017010935, + "grad_norm": 1.5795567035675049, + "learning_rate": 5e-05, + "loss": 0.1236, + "step": 211 + }, + { + "epoch": 0.5151883353584447, + "grad_norm": 0.8533775806427002, + "learning_rate": 5e-05, + "loss": 0.0702, + "step": 212 + }, + { + "epoch": 0.5176184690157959, + "grad_norm": 0.7225982546806335, + "learning_rate": 5e-05, + "loss": 0.2005, + "step": 213 + }, + { + "epoch": 0.520048602673147, + "grad_norm": 0.9016891121864319, + "learning_rate": 5e-05, + "loss": 0.1329, + "step": 214 + }, + { + "epoch": 0.5224787363304981, + "grad_norm": 1.1623356342315674, + "learning_rate": 5e-05, + "loss": 0.1728, + "step": 215 + }, + { + "epoch": 0.5249088699878494, + "grad_norm": 1.1869186162948608, + "learning_rate": 5e-05, + "loss": 0.2517, + "step": 216 + }, + { + "epoch": 0.5273390036452005, + "grad_norm": 1.2271298170089722, + "learning_rate": 5e-05, + "loss": 0.1402, + "step": 217 + }, + { + "epoch": 0.5297691373025516, + "grad_norm": 1.182732105255127, + "learning_rate": 5e-05, + "loss": 0.1341, + "step": 218 + }, + { + "epoch": 0.5321992709599028, + "grad_norm": 0.8633968234062195, + "learning_rate": 5e-05, + "loss": 0.1469, + "step": 219 + }, + { + "epoch": 0.534629404617254, + "grad_norm": 0.9293995499610901, + "learning_rate": 5e-05, + "loss": 0.1121, + "step": 220 + }, + { + "epoch": 0.5370595382746051, + "grad_norm": 1.3438748121261597, + "learning_rate": 5e-05, + "loss": 0.1718, + "step": 221 + }, + { + "epoch": 0.5394896719319563, + "grad_norm": 1.2411259412765503, + "learning_rate": 5e-05, + "loss": 0.1304, + "step": 222 + }, + { + "epoch": 0.5419198055893074, + "grad_norm": 0.8153496384620667, + "learning_rate": 5e-05, + "loss": 0.1334, + "step": 223 + }, + { + "epoch": 0.5443499392466585, + "grad_norm": 0.7595478892326355, + "learning_rate": 5e-05, + "loss": 0.0994, + "step": 224 + }, + { + "epoch": 0.5467800729040098, + "grad_norm": 1.0710370540618896, + "learning_rate": 5e-05, + "loss": 0.0351, + "step": 225 + }, + { + "epoch": 0.5492102065613609, + "grad_norm": 1.2209466695785522, + "learning_rate": 5e-05, + "loss": 0.1049, + "step": 226 + }, + { + "epoch": 0.551640340218712, + "grad_norm": 0.5865522623062134, + "learning_rate": 5e-05, + "loss": 0.0948, + "step": 227 + }, + { + "epoch": 0.5540704738760632, + "grad_norm": 0.8379694223403931, + "learning_rate": 5e-05, + "loss": 0.1269, + "step": 228 + }, + { + "epoch": 0.5565006075334143, + "grad_norm": 
0.7321844100952148, + "learning_rate": 5e-05, + "loss": 0.0912, + "step": 229 + }, + { + "epoch": 0.5589307411907655, + "grad_norm": 1.0253808498382568, + "learning_rate": 5e-05, + "loss": 0.0811, + "step": 230 + }, + { + "epoch": 0.5613608748481167, + "grad_norm": 0.9015201330184937, + "learning_rate": 5e-05, + "loss": 0.0725, + "step": 231 + }, + { + "epoch": 0.5637910085054678, + "grad_norm": 0.8760166168212891, + "learning_rate": 5e-05, + "loss": 0.1727, + "step": 232 + }, + { + "epoch": 0.5662211421628189, + "grad_norm": 0.892677903175354, + "learning_rate": 5e-05, + "loss": 0.084, + "step": 233 + }, + { + "epoch": 0.56865127582017, + "grad_norm": 0.6722489595413208, + "learning_rate": 5e-05, + "loss": 0.0845, + "step": 234 + }, + { + "epoch": 0.5710814094775213, + "grad_norm": 0.6187300682067871, + "learning_rate": 5e-05, + "loss": 0.1269, + "step": 235 + }, + { + "epoch": 0.5735115431348724, + "grad_norm": 0.8674693703651428, + "learning_rate": 5e-05, + "loss": 0.09, + "step": 236 + }, + { + "epoch": 0.5759416767922235, + "grad_norm": 0.9376964569091797, + "learning_rate": 5e-05, + "loss": 0.1916, + "step": 237 + }, + { + "epoch": 0.5783718104495748, + "grad_norm": 0.6777618527412415, + "learning_rate": 5e-05, + "loss": 0.1321, + "step": 238 + }, + { + "epoch": 0.5808019441069259, + "grad_norm": 1.1223134994506836, + "learning_rate": 5e-05, + "loss": 0.1811, + "step": 239 + }, + { + "epoch": 0.583232077764277, + "grad_norm": 0.8348031044006348, + "learning_rate": 5e-05, + "loss": 0.1508, + "step": 240 + }, + { + "epoch": 0.5856622114216282, + "grad_norm": 0.7387763857841492, + "learning_rate": 5e-05, + "loss": 0.058, + "step": 241 + }, + { + "epoch": 0.5880923450789793, + "grad_norm": 0.6327199339866638, + "learning_rate": 5e-05, + "loss": 0.1096, + "step": 242 + }, + { + "epoch": 0.5905224787363305, + "grad_norm": 0.7040870785713196, + "learning_rate": 5e-05, + "loss": 0.1281, + "step": 243 + }, + { + "epoch": 0.5929526123936817, + "grad_norm": 1.2110347747802734, + "learning_rate": 5e-05, + "loss": 0.1959, + "step": 244 + }, + { + "epoch": 0.5953827460510328, + "grad_norm": 0.8480271100997925, + "learning_rate": 5e-05, + "loss": 0.1234, + "step": 245 + }, + { + "epoch": 0.5978128797083839, + "grad_norm": 1.0009665489196777, + "learning_rate": 5e-05, + "loss": 0.1223, + "step": 246 + }, + { + "epoch": 0.6002430133657352, + "grad_norm": 0.7057176828384399, + "learning_rate": 5e-05, + "loss": 0.1747, + "step": 247 + }, + { + "epoch": 0.6026731470230863, + "grad_norm": 0.9945976138114929, + "learning_rate": 5e-05, + "loss": 0.1766, + "step": 248 + }, + { + "epoch": 0.6051032806804374, + "grad_norm": 0.7461872696876526, + "learning_rate": 5e-05, + "loss": 0.1224, + "step": 249 + }, + { + "epoch": 0.6075334143377886, + "grad_norm": 0.5109842419624329, + "learning_rate": 5e-05, + "loss": 0.1105, + "step": 250 + }, + { + "epoch": 0.6099635479951397, + "grad_norm": 0.7398750185966492, + "learning_rate": 5e-05, + "loss": 0.139, + "step": 251 + }, + { + "epoch": 0.6123936816524909, + "grad_norm": 0.7645841836929321, + "learning_rate": 5e-05, + "loss": 0.11, + "step": 252 + }, + { + "epoch": 0.6148238153098421, + "grad_norm": 0.8627939224243164, + "learning_rate": 5e-05, + "loss": 0.1277, + "step": 253 + }, + { + "epoch": 0.6172539489671932, + "grad_norm": 0.6933022737503052, + "learning_rate": 5e-05, + "loss": 0.1283, + "step": 254 + }, + { + "epoch": 0.6196840826245443, + "grad_norm": 0.8254914283752441, + "learning_rate": 5e-05, + "loss": 0.1087, + "step": 255 + }, + { + "epoch": 
0.6221142162818954, + "grad_norm": 0.9561761617660522, + "learning_rate": 5e-05, + "loss": 0.1019, + "step": 256 + }, + { + "epoch": 0.6245443499392467, + "grad_norm": 1.0569480657577515, + "learning_rate": 5e-05, + "loss": 0.1235, + "step": 257 + }, + { + "epoch": 0.6269744835965978, + "grad_norm": 1.0708096027374268, + "learning_rate": 5e-05, + "loss": 0.1797, + "step": 258 + }, + { + "epoch": 0.6294046172539489, + "grad_norm": 0.5817265510559082, + "learning_rate": 5e-05, + "loss": 0.0787, + "step": 259 + }, + { + "epoch": 0.6318347509113001, + "grad_norm": 0.8255194425582886, + "learning_rate": 5e-05, + "loss": 0.1754, + "step": 260 + }, + { + "epoch": 0.6342648845686513, + "grad_norm": 0.8279481530189514, + "learning_rate": 5e-05, + "loss": 0.1668, + "step": 261 + }, + { + "epoch": 0.6366950182260024, + "grad_norm": 0.6393010020256042, + "learning_rate": 5e-05, + "loss": 0.0756, + "step": 262 + }, + { + "epoch": 0.6391251518833536, + "grad_norm": 0.49387434124946594, + "learning_rate": 5e-05, + "loss": 0.0757, + "step": 263 + }, + { + "epoch": 0.6415552855407047, + "grad_norm": 0.5705033540725708, + "learning_rate": 5e-05, + "loss": 0.0949, + "step": 264 + }, + { + "epoch": 0.6439854191980559, + "grad_norm": 0.9022333025932312, + "learning_rate": 5e-05, + "loss": 0.1716, + "step": 265 + }, + { + "epoch": 0.6464155528554071, + "grad_norm": 0.5299041271209717, + "learning_rate": 5e-05, + "loss": 0.0799, + "step": 266 + }, + { + "epoch": 0.6488456865127582, + "grad_norm": 0.7919138669967651, + "learning_rate": 5e-05, + "loss": 0.1476, + "step": 267 + }, + { + "epoch": 0.6512758201701093, + "grad_norm": 1.394972324371338, + "learning_rate": 5e-05, + "loss": 0.112, + "step": 268 + }, + { + "epoch": 0.6537059538274606, + "grad_norm": 0.9046385884284973, + "learning_rate": 5e-05, + "loss": 0.1683, + "step": 269 + }, + { + "epoch": 0.6561360874848117, + "grad_norm": 0.7190728783607483, + "learning_rate": 5e-05, + "loss": 0.1434, + "step": 270 + }, + { + "epoch": 0.6585662211421628, + "grad_norm": 1.0634692907333374, + "learning_rate": 5e-05, + "loss": 0.1244, + "step": 271 + }, + { + "epoch": 0.660996354799514, + "grad_norm": 0.569398045539856, + "learning_rate": 5e-05, + "loss": 0.0728, + "step": 272 + }, + { + "epoch": 0.6634264884568651, + "grad_norm": 0.9083871841430664, + "learning_rate": 5e-05, + "loss": 0.1467, + "step": 273 + }, + { + "epoch": 0.6658566221142163, + "grad_norm": 0.7927613854408264, + "learning_rate": 5e-05, + "loss": 0.1832, + "step": 274 + }, + { + "epoch": 0.6682867557715675, + "grad_norm": 1.0088517665863037, + "learning_rate": 5e-05, + "loss": 0.1422, + "step": 275 + }, + { + "epoch": 0.6707168894289186, + "grad_norm": 0.5374720692634583, + "learning_rate": 5e-05, + "loss": 0.0971, + "step": 276 + }, + { + "epoch": 0.6731470230862697, + "grad_norm": 0.7200155854225159, + "learning_rate": 5e-05, + "loss": 0.0722, + "step": 277 + }, + { + "epoch": 0.675577156743621, + "grad_norm": 0.7518504858016968, + "learning_rate": 5e-05, + "loss": 0.126, + "step": 278 + }, + { + "epoch": 0.6780072904009721, + "grad_norm": 0.8544089794158936, + "learning_rate": 5e-05, + "loss": 0.113, + "step": 279 + }, + { + "epoch": 0.6804374240583232, + "grad_norm": 0.7537454962730408, + "learning_rate": 5e-05, + "loss": 0.0615, + "step": 280 + }, + { + "epoch": 0.6828675577156743, + "grad_norm": 0.8440731763839722, + "learning_rate": 5e-05, + "loss": 0.0933, + "step": 281 + }, + { + "epoch": 0.6852976913730255, + "grad_norm": 1.7643004655838013, + "learning_rate": 5e-05, + "loss": 0.1807, + 
"step": 282 + }, + { + "epoch": 0.6877278250303767, + "grad_norm": 0.6502506732940674, + "learning_rate": 5e-05, + "loss": 0.1189, + "step": 283 + }, + { + "epoch": 0.6901579586877278, + "grad_norm": 0.8678569197654724, + "learning_rate": 5e-05, + "loss": 0.094, + "step": 284 + }, + { + "epoch": 0.692588092345079, + "grad_norm": 0.8592473864555359, + "learning_rate": 5e-05, + "loss": 0.1197, + "step": 285 + }, + { + "epoch": 0.6950182260024301, + "grad_norm": 0.8664891719818115, + "learning_rate": 5e-05, + "loss": 0.0575, + "step": 286 + }, + { + "epoch": 0.6974483596597812, + "grad_norm": 0.5715927481651306, + "learning_rate": 5e-05, + "loss": 0.0668, + "step": 287 + }, + { + "epoch": 0.6998784933171325, + "grad_norm": 1.0751949548721313, + "learning_rate": 5e-05, + "loss": 0.1457, + "step": 288 + }, + { + "epoch": 0.7023086269744836, + "grad_norm": 0.8039306402206421, + "learning_rate": 5e-05, + "loss": 0.1206, + "step": 289 + }, + { + "epoch": 0.7047387606318347, + "grad_norm": 1.5916086435317993, + "learning_rate": 5e-05, + "loss": 0.1471, + "step": 290 + }, + { + "epoch": 0.707168894289186, + "grad_norm": 0.7829633355140686, + "learning_rate": 5e-05, + "loss": 0.106, + "step": 291 + }, + { + "epoch": 0.7095990279465371, + "grad_norm": 0.8277871608734131, + "learning_rate": 5e-05, + "loss": 0.0347, + "step": 292 + }, + { + "epoch": 0.7120291616038882, + "grad_norm": 0.9798584580421448, + "learning_rate": 5e-05, + "loss": 0.1231, + "step": 293 + }, + { + "epoch": 0.7144592952612394, + "grad_norm": 0.6273385286331177, + "learning_rate": 5e-05, + "loss": 0.0838, + "step": 294 + }, + { + "epoch": 0.7168894289185905, + "grad_norm": 1.2111278772354126, + "learning_rate": 5e-05, + "loss": 0.1971, + "step": 295 + }, + { + "epoch": 0.7193195625759417, + "grad_norm": 0.5927327275276184, + "learning_rate": 5e-05, + "loss": 0.1198, + "step": 296 + }, + { + "epoch": 0.7217496962332929, + "grad_norm": 0.672064483165741, + "learning_rate": 5e-05, + "loss": 0.1295, + "step": 297 + }, + { + "epoch": 0.724179829890644, + "grad_norm": 0.6398962736129761, + "learning_rate": 5e-05, + "loss": 0.0933, + "step": 298 + }, + { + "epoch": 0.7266099635479951, + "grad_norm": 0.7458367943763733, + "learning_rate": 5e-05, + "loss": 0.1407, + "step": 299 + }, + { + "epoch": 0.7290400972053463, + "grad_norm": 0.6593539118766785, + "learning_rate": 5e-05, + "loss": 0.1055, + "step": 300 + }, + { + "epoch": 0.7290400972053463, + "eval_loss": 0.24557192623615265, + "eval_runtime": 513.4098, + "eval_samples_per_second": 5.352, + "eval_steps_per_second": 0.67, + "step": 300 + }, + { + "epoch": 0.7314702308626975, + "grad_norm": 0.5555610656738281, + "learning_rate": 5e-05, + "loss": 0.1641, + "step": 301 + }, + { + "epoch": 0.7339003645200486, + "grad_norm": 0.49084270000457764, + "learning_rate": 5e-05, + "loss": 0.0786, + "step": 302 + }, + { + "epoch": 0.7363304981773997, + "grad_norm": 0.9267096519470215, + "learning_rate": 5e-05, + "loss": 0.2053, + "step": 303 + }, + { + "epoch": 0.7387606318347509, + "grad_norm": 0.440594345331192, + "learning_rate": 5e-05, + "loss": 0.1224, + "step": 304 + }, + { + "epoch": 0.741190765492102, + "grad_norm": 1.0237218141555786, + "learning_rate": 5e-05, + "loss": 0.1263, + "step": 305 + }, + { + "epoch": 0.7436208991494532, + "grad_norm": 1.0087871551513672, + "learning_rate": 5e-05, + "loss": 0.1218, + "step": 306 + }, + { + "epoch": 0.7460510328068044, + "grad_norm": 0.8280074000358582, + "learning_rate": 5e-05, + "loss": 0.1851, + "step": 307 + }, + { + "epoch": 
0.7484811664641555, + "grad_norm": 0.681713342666626, + "learning_rate": 5e-05, + "loss": 0.1757, + "step": 308 + }, + { + "epoch": 0.7509113001215066, + "grad_norm": 4.372740268707275, + "learning_rate": 5e-05, + "loss": 0.104, + "step": 309 + }, + { + "epoch": 0.7533414337788579, + "grad_norm": 0.8002400398254395, + "learning_rate": 5e-05, + "loss": 0.0922, + "step": 310 + }, + { + "epoch": 0.755771567436209, + "grad_norm": 0.7685486674308777, + "learning_rate": 5e-05, + "loss": 0.1391, + "step": 311 + }, + { + "epoch": 0.7582017010935601, + "grad_norm": 1.282363772392273, + "learning_rate": 5e-05, + "loss": 0.1141, + "step": 312 + }, + { + "epoch": 0.7606318347509113, + "grad_norm": 0.9099876284599304, + "learning_rate": 5e-05, + "loss": 0.1396, + "step": 313 + }, + { + "epoch": 0.7630619684082625, + "grad_norm": 0.5911856293678284, + "learning_rate": 5e-05, + "loss": 0.0631, + "step": 314 + }, + { + "epoch": 0.7654921020656136, + "grad_norm": 0.7148822546005249, + "learning_rate": 5e-05, + "loss": 0.1714, + "step": 315 + }, + { + "epoch": 0.7679222357229648, + "grad_norm": 0.6756410002708435, + "learning_rate": 5e-05, + "loss": 0.0894, + "step": 316 + }, + { + "epoch": 0.7703523693803159, + "grad_norm": 0.8938581347465515, + "learning_rate": 5e-05, + "loss": 0.1538, + "step": 317 + }, + { + "epoch": 0.772782503037667, + "grad_norm": 1.061012625694275, + "learning_rate": 5e-05, + "loss": 0.1069, + "step": 318 + }, + { + "epoch": 0.7752126366950183, + "grad_norm": 0.8262349963188171, + "learning_rate": 5e-05, + "loss": 0.0745, + "step": 319 + }, + { + "epoch": 0.7776427703523694, + "grad_norm": 1.3609825372695923, + "learning_rate": 5e-05, + "loss": 0.0689, + "step": 320 + }, + { + "epoch": 0.7800729040097205, + "grad_norm": 1.2151660919189453, + "learning_rate": 5e-05, + "loss": 0.0863, + "step": 321 + }, + { + "epoch": 0.7825030376670717, + "grad_norm": 0.7165310978889465, + "learning_rate": 5e-05, + "loss": 0.084, + "step": 322 + }, + { + "epoch": 0.7849331713244229, + "grad_norm": 1.2151451110839844, + "learning_rate": 5e-05, + "loss": 0.1229, + "step": 323 + }, + { + "epoch": 0.787363304981774, + "grad_norm": 0.9600043296813965, + "learning_rate": 5e-05, + "loss": 0.0524, + "step": 324 + }, + { + "epoch": 0.7897934386391251, + "grad_norm": 1.1785893440246582, + "learning_rate": 5e-05, + "loss": 0.1635, + "step": 325 + }, + { + "epoch": 0.7922235722964763, + "grad_norm": 0.6502353549003601, + "learning_rate": 5e-05, + "loss": 0.103, + "step": 326 + }, + { + "epoch": 0.7946537059538274, + "grad_norm": 0.6894869208335876, + "learning_rate": 5e-05, + "loss": 0.0824, + "step": 327 + }, + { + "epoch": 0.7970838396111786, + "grad_norm": 0.8827742338180542, + "learning_rate": 5e-05, + "loss": 0.0899, + "step": 328 + }, + { + "epoch": 0.7995139732685298, + "grad_norm": 0.82513827085495, + "learning_rate": 5e-05, + "loss": 0.0642, + "step": 329 + }, + { + "epoch": 0.8019441069258809, + "grad_norm": 0.7896872758865356, + "learning_rate": 5e-05, + "loss": 0.1869, + "step": 330 + }, + { + "epoch": 0.804374240583232, + "grad_norm": 1.421380639076233, + "learning_rate": 5e-05, + "loss": 0.0668, + "step": 331 + }, + { + "epoch": 0.8068043742405833, + "grad_norm": 0.826987087726593, + "learning_rate": 5e-05, + "loss": 0.1543, + "step": 332 + }, + { + "epoch": 0.8092345078979344, + "grad_norm": 0.7504023909568787, + "learning_rate": 5e-05, + "loss": 0.1167, + "step": 333 + }, + { + "epoch": 0.8116646415552855, + "grad_norm": 0.9217634797096252, + "learning_rate": 5e-05, + "loss": 0.0801, + "step": 
334 + }, + { + "epoch": 0.8140947752126367, + "grad_norm": 0.6423681974411011, + "learning_rate": 5e-05, + "loss": 0.0445, + "step": 335 + }, + { + "epoch": 0.8165249088699879, + "grad_norm": 0.3945442736148834, + "learning_rate": 5e-05, + "loss": 0.0737, + "step": 336 + }, + { + "epoch": 0.818955042527339, + "grad_norm": 0.6396763324737549, + "learning_rate": 5e-05, + "loss": 0.1118, + "step": 337 + }, + { + "epoch": 0.8213851761846902, + "grad_norm": 1.31553053855896, + "learning_rate": 5e-05, + "loss": 0.0989, + "step": 338 + }, + { + "epoch": 0.8238153098420413, + "grad_norm": 0.962189793586731, + "learning_rate": 5e-05, + "loss": 0.1043, + "step": 339 + }, + { + "epoch": 0.8262454434993924, + "grad_norm": 0.9305171370506287, + "learning_rate": 5e-05, + "loss": 0.1333, + "step": 340 + }, + { + "epoch": 0.8286755771567437, + "grad_norm": 0.6102299690246582, + "learning_rate": 5e-05, + "loss": 0.0743, + "step": 341 + }, + { + "epoch": 0.8311057108140948, + "grad_norm": 0.9117159247398376, + "learning_rate": 5e-05, + "loss": 0.0399, + "step": 342 + }, + { + "epoch": 0.8335358444714459, + "grad_norm": 0.614378035068512, + "learning_rate": 5e-05, + "loss": 0.0767, + "step": 343 + }, + { + "epoch": 0.8359659781287971, + "grad_norm": 0.934684157371521, + "learning_rate": 5e-05, + "loss": 0.0814, + "step": 344 + }, + { + "epoch": 0.8383961117861483, + "grad_norm": 0.8567538261413574, + "learning_rate": 5e-05, + "loss": 0.0866, + "step": 345 + }, + { + "epoch": 0.8408262454434994, + "grad_norm": 1.083308219909668, + "learning_rate": 5e-05, + "loss": 0.0534, + "step": 346 + }, + { + "epoch": 0.8432563791008505, + "grad_norm": 0.8664528727531433, + "learning_rate": 5e-05, + "loss": 0.1348, + "step": 347 + }, + { + "epoch": 0.8456865127582017, + "grad_norm": 0.7151762843132019, + "learning_rate": 5e-05, + "loss": 0.0615, + "step": 348 + }, + { + "epoch": 0.8481166464155528, + "grad_norm": 0.784515917301178, + "learning_rate": 5e-05, + "loss": 0.1014, + "step": 349 + }, + { + "epoch": 0.850546780072904, + "grad_norm": 0.7713536620140076, + "learning_rate": 5e-05, + "loss": 0.0684, + "step": 350 + }, + { + "epoch": 0.8529769137302552, + "grad_norm": 0.8839780688285828, + "learning_rate": 5e-05, + "loss": 0.2673, + "step": 351 + }, + { + "epoch": 0.8554070473876063, + "grad_norm": 0.6095230579376221, + "learning_rate": 5e-05, + "loss": 0.1667, + "step": 352 + }, + { + "epoch": 0.8578371810449574, + "grad_norm": 1.1133002042770386, + "learning_rate": 5e-05, + "loss": 0.1746, + "step": 353 + }, + { + "epoch": 0.8602673147023087, + "grad_norm": 0.5206813812255859, + "learning_rate": 5e-05, + "loss": 0.1507, + "step": 354 + }, + { + "epoch": 0.8626974483596598, + "grad_norm": 0.6527222990989685, + "learning_rate": 5e-05, + "loss": 0.1045, + "step": 355 + }, + { + "epoch": 0.8651275820170109, + "grad_norm": 0.6029911637306213, + "learning_rate": 5e-05, + "loss": 0.1157, + "step": 356 + }, + { + "epoch": 0.8675577156743621, + "grad_norm": 0.7916355729103088, + "learning_rate": 5e-05, + "loss": 0.0817, + "step": 357 + }, + { + "epoch": 0.8699878493317132, + "grad_norm": 0.6410179734230042, + "learning_rate": 5e-05, + "loss": 0.0707, + "step": 358 + }, + { + "epoch": 0.8724179829890644, + "grad_norm": 1.0566637516021729, + "learning_rate": 5e-05, + "loss": 0.0774, + "step": 359 + }, + { + "epoch": 0.8748481166464156, + "grad_norm": 0.6340796947479248, + "learning_rate": 5e-05, + "loss": 0.1003, + "step": 360 + }, + { + "epoch": 0.8772782503037667, + "grad_norm": 0.6086530089378357, + "learning_rate": 5e-05, 
+ "loss": 0.0441, + "step": 361 + }, + { + "epoch": 0.8797083839611178, + "grad_norm": 0.8955773115158081, + "learning_rate": 5e-05, + "loss": 0.1389, + "step": 362 + }, + { + "epoch": 0.8821385176184691, + "grad_norm": 0.4679882228374481, + "learning_rate": 5e-05, + "loss": 0.0708, + "step": 363 + }, + { + "epoch": 0.8845686512758202, + "grad_norm": 0.6539987921714783, + "learning_rate": 5e-05, + "loss": 0.1078, + "step": 364 + }, + { + "epoch": 0.8869987849331713, + "grad_norm": 0.8322189450263977, + "learning_rate": 5e-05, + "loss": 0.0477, + "step": 365 + }, + { + "epoch": 0.8894289185905225, + "grad_norm": 0.5329148769378662, + "learning_rate": 5e-05, + "loss": 0.0638, + "step": 366 + }, + { + "epoch": 0.8918590522478737, + "grad_norm": 0.7909948825836182, + "learning_rate": 5e-05, + "loss": 0.0941, + "step": 367 + }, + { + "epoch": 0.8942891859052248, + "grad_norm": 0.6703280806541443, + "learning_rate": 5e-05, + "loss": 0.0989, + "step": 368 + }, + { + "epoch": 0.8967193195625759, + "grad_norm": 0.6218695640563965, + "learning_rate": 5e-05, + "loss": 0.0712, + "step": 369 + }, + { + "epoch": 0.8991494532199271, + "grad_norm": 1.6482758522033691, + "learning_rate": 5e-05, + "loss": 0.1537, + "step": 370 + }, + { + "epoch": 0.9015795868772782, + "grad_norm": 0.7616936564445496, + "learning_rate": 5e-05, + "loss": 0.1234, + "step": 371 + }, + { + "epoch": 0.9040097205346294, + "grad_norm": 0.7617467045783997, + "learning_rate": 5e-05, + "loss": 0.0895, + "step": 372 + }, + { + "epoch": 0.9064398541919806, + "grad_norm": 0.7406112551689148, + "learning_rate": 5e-05, + "loss": 0.0559, + "step": 373 + }, + { + "epoch": 0.9088699878493317, + "grad_norm": 1.1627577543258667, + "learning_rate": 5e-05, + "loss": 0.0807, + "step": 374 + }, + { + "epoch": 0.9113001215066828, + "grad_norm": 0.6091077923774719, + "learning_rate": 5e-05, + "loss": 0.0649, + "step": 375 + }, + { + "epoch": 0.913730255164034, + "grad_norm": 0.8935027122497559, + "learning_rate": 5e-05, + "loss": 0.1563, + "step": 376 + }, + { + "epoch": 0.9161603888213852, + "grad_norm": 0.8287168741226196, + "learning_rate": 5e-05, + "loss": 0.0725, + "step": 377 + }, + { + "epoch": 0.9185905224787363, + "grad_norm": 0.5430625677108765, + "learning_rate": 5e-05, + "loss": 0.0984, + "step": 378 + }, + { + "epoch": 0.9210206561360875, + "grad_norm": 0.7538096308708191, + "learning_rate": 5e-05, + "loss": 0.1231, + "step": 379 + }, + { + "epoch": 0.9234507897934386, + "grad_norm": 0.9483656287193298, + "learning_rate": 5e-05, + "loss": 0.1341, + "step": 380 + }, + { + "epoch": 0.9258809234507898, + "grad_norm": 0.5689654350280762, + "learning_rate": 5e-05, + "loss": 0.1116, + "step": 381 + }, + { + "epoch": 0.928311057108141, + "grad_norm": 0.987369179725647, + "learning_rate": 5e-05, + "loss": 0.1815, + "step": 382 + }, + { + "epoch": 0.9307411907654921, + "grad_norm": 0.872460126876831, + "learning_rate": 5e-05, + "loss": 0.0994, + "step": 383 + }, + { + "epoch": 0.9331713244228432, + "grad_norm": 0.785040020942688, + "learning_rate": 5e-05, + "loss": 0.1006, + "step": 384 + }, + { + "epoch": 0.9356014580801945, + "grad_norm": 0.6228112578392029, + "learning_rate": 5e-05, + "loss": 0.0805, + "step": 385 + }, + { + "epoch": 0.9380315917375456, + "grad_norm": 0.746306836605072, + "learning_rate": 5e-05, + "loss": 0.0672, + "step": 386 + }, + { + "epoch": 0.9404617253948967, + "grad_norm": 0.44906482100486755, + "learning_rate": 5e-05, + "loss": 0.0831, + "step": 387 + }, + { + "epoch": 0.9428918590522479, + "grad_norm": 
0.7880069613456726, + "learning_rate": 5e-05, + "loss": 0.0871, + "step": 388 + }, + { + "epoch": 0.945321992709599, + "grad_norm": 0.7579517960548401, + "learning_rate": 5e-05, + "loss": 0.1306, + "step": 389 + }, + { + "epoch": 0.9477521263669502, + "grad_norm": 0.5668421387672424, + "learning_rate": 5e-05, + "loss": 0.0738, + "step": 390 + }, + { + "epoch": 0.9501822600243013, + "grad_norm": 0.8249059319496155, + "learning_rate": 5e-05, + "loss": 0.069, + "step": 391 + }, + { + "epoch": 0.9526123936816525, + "grad_norm": 0.7063718438148499, + "learning_rate": 5e-05, + "loss": 0.0488, + "step": 392 + }, + { + "epoch": 0.9550425273390036, + "grad_norm": 0.9006249904632568, + "learning_rate": 5e-05, + "loss": 0.029, + "step": 393 + }, + { + "epoch": 0.9574726609963548, + "grad_norm": 0.5778633952140808, + "learning_rate": 5e-05, + "loss": 0.0859, + "step": 394 + }, + { + "epoch": 0.959902794653706, + "grad_norm": 0.9541341066360474, + "learning_rate": 5e-05, + "loss": 0.0698, + "step": 395 + }, + { + "epoch": 0.9623329283110571, + "grad_norm": 0.7825549244880676, + "learning_rate": 5e-05, + "loss": 0.0714, + "step": 396 + }, + { + "epoch": 0.9647630619684082, + "grad_norm": 0.729721188545227, + "learning_rate": 5e-05, + "loss": 0.0651, + "step": 397 + }, + { + "epoch": 0.9671931956257594, + "grad_norm": 0.7052625417709351, + "learning_rate": 5e-05, + "loss": 0.1518, + "step": 398 + }, + { + "epoch": 0.9696233292831106, + "grad_norm": 2.3800599575042725, + "learning_rate": 5e-05, + "loss": 0.0919, + "step": 399 + }, + { + "epoch": 0.9720534629404617, + "grad_norm": 0.5317090749740601, + "learning_rate": 5e-05, + "loss": 0.0828, + "step": 400 + }, + { + "epoch": 0.9720534629404617, + "eval_loss": 0.2335556447505951, + "eval_runtime": 513.3338, + "eval_samples_per_second": 5.353, + "eval_steps_per_second": 0.67, + "step": 400 + }, + { + "epoch": 0.9744835965978129, + "grad_norm": 0.6768686771392822, + "learning_rate": 5e-05, + "loss": 0.1675, + "step": 401 + }, + { + "epoch": 0.976913730255164, + "grad_norm": 0.5037873983383179, + "learning_rate": 5e-05, + "loss": 0.0199, + "step": 402 + }, + { + "epoch": 0.9793438639125152, + "grad_norm": 1.7766000032424927, + "learning_rate": 5e-05, + "loss": 0.0846, + "step": 403 + }, + { + "epoch": 0.9817739975698664, + "grad_norm": 0.978345513343811, + "learning_rate": 5e-05, + "loss": 0.0593, + "step": 404 + }, + { + "epoch": 0.9842041312272175, + "grad_norm": 1.4286214113235474, + "learning_rate": 5e-05, + "loss": 0.1963, + "step": 405 + }, + { + "epoch": 0.9866342648845686, + "grad_norm": 1.1499220132827759, + "learning_rate": 5e-05, + "loss": 0.0786, + "step": 406 + }, + { + "epoch": 0.9890643985419199, + "grad_norm": 1.4914474487304688, + "learning_rate": 5e-05, + "loss": 0.1152, + "step": 407 + }, + { + "epoch": 0.991494532199271, + "grad_norm": 0.7232803106307983, + "learning_rate": 5e-05, + "loss": 0.1338, + "step": 408 + }, + { + "epoch": 0.9939246658566221, + "grad_norm": 0.8456845283508301, + "learning_rate": 5e-05, + "loss": 0.0631, + "step": 409 + }, + { + "epoch": 0.9963547995139733, + "grad_norm": 0.5165804028511047, + "learning_rate": 5e-05, + "loss": 0.0725, + "step": 410 + }, + { + "epoch": 0.9987849331713244, + "grad_norm": 0.8364010453224182, + "learning_rate": 5e-05, + "loss": 0.1215, + "step": 411 + }, + { + "epoch": 1.0012150668286757, + "grad_norm": 0.6961699724197388, + "learning_rate": 5e-05, + "loss": 0.0327, + "step": 412 + }, + { + "epoch": 1.0036452004860268, + "grad_norm": 0.9358980655670166, + "learning_rate": 5e-05, + 
"loss": 0.0724, + "step": 413 + }, + { + "epoch": 1.006075334143378, + "grad_norm": 0.641076922416687, + "learning_rate": 5e-05, + "loss": 0.0824, + "step": 414 + }, + { + "epoch": 1.008505467800729, + "grad_norm": 0.6918608546257019, + "learning_rate": 5e-05, + "loss": 0.083, + "step": 415 + }, + { + "epoch": 1.0109356014580801, + "grad_norm": 0.7481106519699097, + "learning_rate": 5e-05, + "loss": 0.0887, + "step": 416 + }, + { + "epoch": 1.0133657351154313, + "grad_norm": 0.7577409148216248, + "learning_rate": 5e-05, + "loss": 0.0684, + "step": 417 + }, + { + "epoch": 1.0157958687727826, + "grad_norm": 1.0815515518188477, + "learning_rate": 5e-05, + "loss": 0.1167, + "step": 418 + }, + { + "epoch": 1.0182260024301337, + "grad_norm": 1.248225450515747, + "learning_rate": 5e-05, + "loss": 0.0644, + "step": 419 + }, + { + "epoch": 1.0206561360874848, + "grad_norm": 0.7970739006996155, + "learning_rate": 5e-05, + "loss": 0.1712, + "step": 420 + }, + { + "epoch": 1.023086269744836, + "grad_norm": 1.343601942062378, + "learning_rate": 5e-05, + "loss": 0.1359, + "step": 421 + }, + { + "epoch": 1.025516403402187, + "grad_norm": 0.705987274646759, + "learning_rate": 5e-05, + "loss": 0.0961, + "step": 422 + }, + { + "epoch": 1.0279465370595382, + "grad_norm": 0.5520442724227905, + "learning_rate": 5e-05, + "loss": 0.0839, + "step": 423 + }, + { + "epoch": 1.0303766707168893, + "grad_norm": 0.5799084305763245, + "learning_rate": 5e-05, + "loss": 0.0759, + "step": 424 + }, + { + "epoch": 1.0328068043742407, + "grad_norm": 0.610801637172699, + "learning_rate": 5e-05, + "loss": 0.0593, + "step": 425 + }, + { + "epoch": 1.0352369380315918, + "grad_norm": 1.035436987876892, + "learning_rate": 5e-05, + "loss": 0.194, + "step": 426 + }, + { + "epoch": 1.037667071688943, + "grad_norm": 0.9973961114883423, + "learning_rate": 5e-05, + "loss": 0.0954, + "step": 427 + }, + { + "epoch": 1.040097205346294, + "grad_norm": 0.8617413640022278, + "learning_rate": 5e-05, + "loss": 0.0917, + "step": 428 + }, + { + "epoch": 1.0425273390036451, + "grad_norm": 0.9682510495185852, + "learning_rate": 5e-05, + "loss": 0.1391, + "step": 429 + }, + { + "epoch": 1.0449574726609963, + "grad_norm": 0.3044584393501282, + "learning_rate": 5e-05, + "loss": 0.025, + "step": 430 + }, + { + "epoch": 1.0473876063183476, + "grad_norm": 0.6183155179023743, + "learning_rate": 5e-05, + "loss": 0.0347, + "step": 431 + }, + { + "epoch": 1.0498177399756987, + "grad_norm": 0.37442171573638916, + "learning_rate": 5e-05, + "loss": 0.0452, + "step": 432 + }, + { + "epoch": 1.0522478736330498, + "grad_norm": 0.565359890460968, + "learning_rate": 5e-05, + "loss": 0.0943, + "step": 433 + }, + { + "epoch": 1.054678007290401, + "grad_norm": 0.6165122389793396, + "learning_rate": 5e-05, + "loss": 0.0851, + "step": 434 + }, + { + "epoch": 1.057108140947752, + "grad_norm": 0.7150765061378479, + "learning_rate": 5e-05, + "loss": 0.0704, + "step": 435 + }, + { + "epoch": 1.0595382746051032, + "grad_norm": 0.7983779311180115, + "learning_rate": 5e-05, + "loss": 0.121, + "step": 436 + }, + { + "epoch": 1.0619684082624545, + "grad_norm": 0.5900336503982544, + "learning_rate": 5e-05, + "loss": 0.0938, + "step": 437 + }, + { + "epoch": 1.0643985419198057, + "grad_norm": 0.6962295174598694, + "learning_rate": 5e-05, + "loss": 0.1227, + "step": 438 + }, + { + "epoch": 1.0668286755771568, + "grad_norm": 1.0119210481643677, + "learning_rate": 5e-05, + "loss": 0.0737, + "step": 439 + }, + { + "epoch": 1.069258809234508, + "grad_norm": 0.4835350811481476, + 
"learning_rate": 5e-05, + "loss": 0.0644, + "step": 440 + }, + { + "epoch": 1.071688942891859, + "grad_norm": 0.6343538761138916, + "learning_rate": 5e-05, + "loss": 0.0796, + "step": 441 + }, + { + "epoch": 1.0741190765492101, + "grad_norm": 0.41634368896484375, + "learning_rate": 5e-05, + "loss": 0.087, + "step": 442 + }, + { + "epoch": 1.0765492102065615, + "grad_norm": 0.7903019189834595, + "learning_rate": 5e-05, + "loss": 0.0564, + "step": 443 + }, + { + "epoch": 1.0789793438639126, + "grad_norm": 0.45615118741989136, + "learning_rate": 5e-05, + "loss": 0.0928, + "step": 444 + }, + { + "epoch": 1.0814094775212637, + "grad_norm": 0.5224602222442627, + "learning_rate": 5e-05, + "loss": 0.1134, + "step": 445 + }, + { + "epoch": 1.0838396111786148, + "grad_norm": 0.7974500060081482, + "learning_rate": 5e-05, + "loss": 0.1592, + "step": 446 + }, + { + "epoch": 1.086269744835966, + "grad_norm": 0.545687735080719, + "learning_rate": 5e-05, + "loss": 0.0892, + "step": 447 + }, + { + "epoch": 1.088699878493317, + "grad_norm": 0.88418048620224, + "learning_rate": 5e-05, + "loss": 0.0406, + "step": 448 + }, + { + "epoch": 1.0911300121506682, + "grad_norm": 0.6175111532211304, + "learning_rate": 5e-05, + "loss": 0.0536, + "step": 449 + }, + { + "epoch": 1.0935601458080195, + "grad_norm": 0.5780444741249084, + "learning_rate": 5e-05, + "loss": 0.0806, + "step": 450 + }, + { + "epoch": 1.0959902794653706, + "grad_norm": 2.7306559085845947, + "learning_rate": 5e-05, + "loss": 0.102, + "step": 451 + }, + { + "epoch": 1.0984204131227218, + "grad_norm": 0.8727555871009827, + "learning_rate": 5e-05, + "loss": 0.0723, + "step": 452 + }, + { + "epoch": 1.1008505467800729, + "grad_norm": 0.49243301153182983, + "learning_rate": 5e-05, + "loss": 0.0589, + "step": 453 + }, + { + "epoch": 1.103280680437424, + "grad_norm": 1.0444567203521729, + "learning_rate": 5e-05, + "loss": 0.1075, + "step": 454 + }, + { + "epoch": 1.1057108140947751, + "grad_norm": 0.5982443690299988, + "learning_rate": 5e-05, + "loss": 0.0646, + "step": 455 + }, + { + "epoch": 1.1081409477521265, + "grad_norm": 0.4428943395614624, + "learning_rate": 5e-05, + "loss": 0.0421, + "step": 456 + }, + { + "epoch": 1.1105710814094776, + "grad_norm": 0.612250566482544, + "learning_rate": 5e-05, + "loss": 0.0694, + "step": 457 + }, + { + "epoch": 1.1130012150668287, + "grad_norm": 0.8445409536361694, + "learning_rate": 5e-05, + "loss": 0.1089, + "step": 458 + }, + { + "epoch": 1.1154313487241798, + "grad_norm": 0.5859739184379578, + "learning_rate": 5e-05, + "loss": 0.1143, + "step": 459 + }, + { + "epoch": 1.117861482381531, + "grad_norm": 0.38835424184799194, + "learning_rate": 5e-05, + "loss": 0.0866, + "step": 460 + }, + { + "epoch": 1.120291616038882, + "grad_norm": 0.7463281750679016, + "learning_rate": 5e-05, + "loss": 0.0815, + "step": 461 + }, + { + "epoch": 1.1227217496962334, + "grad_norm": 0.6839048266410828, + "learning_rate": 5e-05, + "loss": 0.1349, + "step": 462 + }, + { + "epoch": 1.1251518833535845, + "grad_norm": 1.4645401239395142, + "learning_rate": 5e-05, + "loss": 0.0799, + "step": 463 + }, + { + "epoch": 1.1275820170109356, + "grad_norm": 0.7507435083389282, + "learning_rate": 5e-05, + "loss": 0.0598, + "step": 464 + }, + { + "epoch": 1.1300121506682868, + "grad_norm": 0.7818620800971985, + "learning_rate": 5e-05, + "loss": 0.0938, + "step": 465 + }, + { + "epoch": 1.1324422843256379, + "grad_norm": 0.6816830635070801, + "learning_rate": 5e-05, + "loss": 0.0787, + "step": 466 + }, + { + "epoch": 1.134872417982989, + 
"grad_norm": 0.575626015663147, + "learning_rate": 5e-05, + "loss": 0.1461, + "step": 467 + }, + { + "epoch": 1.13730255164034, + "grad_norm": 0.73714679479599, + "learning_rate": 5e-05, + "loss": 0.0761, + "step": 468 + }, + { + "epoch": 1.1397326852976915, + "grad_norm": 0.7288960814476013, + "learning_rate": 5e-05, + "loss": 0.074, + "step": 469 + }, + { + "epoch": 1.1421628189550426, + "grad_norm": 0.5384907126426697, + "learning_rate": 5e-05, + "loss": 0.0732, + "step": 470 + }, + { + "epoch": 1.1445929526123937, + "grad_norm": 0.6071632504463196, + "learning_rate": 5e-05, + "loss": 0.03, + "step": 471 + }, + { + "epoch": 1.1470230862697448, + "grad_norm": 0.5744608044624329, + "learning_rate": 5e-05, + "loss": 0.0912, + "step": 472 + }, + { + "epoch": 1.149453219927096, + "grad_norm": 0.9392772912979126, + "learning_rate": 5e-05, + "loss": 0.1024, + "step": 473 + }, + { + "epoch": 1.1518833535844473, + "grad_norm": 1.170997142791748, + "learning_rate": 5e-05, + "loss": 0.1027, + "step": 474 + }, + { + "epoch": 1.1543134872417984, + "grad_norm": 0.791221559047699, + "learning_rate": 5e-05, + "loss": 0.1305, + "step": 475 + }, + { + "epoch": 1.1567436208991495, + "grad_norm": 0.5443055033683777, + "learning_rate": 5e-05, + "loss": 0.0749, + "step": 476 + }, + { + "epoch": 1.1591737545565006, + "grad_norm": 0.8006246089935303, + "learning_rate": 5e-05, + "loss": 0.0976, + "step": 477 + }, + { + "epoch": 1.1616038882138517, + "grad_norm": 0.47016945481300354, + "learning_rate": 5e-05, + "loss": 0.0386, + "step": 478 + }, + { + "epoch": 1.1640340218712029, + "grad_norm": 0.7536148428916931, + "learning_rate": 5e-05, + "loss": 0.1385, + "step": 479 + }, + { + "epoch": 1.166464155528554, + "grad_norm": 0.6575655341148376, + "learning_rate": 5e-05, + "loss": 0.0851, + "step": 480 + }, + { + "epoch": 1.1688942891859053, + "grad_norm": 0.484667032957077, + "learning_rate": 5e-05, + "loss": 0.0301, + "step": 481 + }, + { + "epoch": 1.1713244228432564, + "grad_norm": 0.8194088339805603, + "learning_rate": 5e-05, + "loss": 0.129, + "step": 482 + }, + { + "epoch": 1.1737545565006076, + "grad_norm": 0.6559669375419617, + "learning_rate": 5e-05, + "loss": 0.0667, + "step": 483 + }, + { + "epoch": 1.1761846901579587, + "grad_norm": 0.8752015829086304, + "learning_rate": 5e-05, + "loss": 0.0637, + "step": 484 + }, + { + "epoch": 1.1786148238153098, + "grad_norm": 0.6029524207115173, + "learning_rate": 5e-05, + "loss": 0.0557, + "step": 485 + }, + { + "epoch": 1.181044957472661, + "grad_norm": 0.5431787371635437, + "learning_rate": 5e-05, + "loss": 0.0338, + "step": 486 + }, + { + "epoch": 1.183475091130012, + "grad_norm": 0.5199941992759705, + "learning_rate": 5e-05, + "loss": 0.0603, + "step": 487 + }, + { + "epoch": 1.1859052247873634, + "grad_norm": 0.520455539226532, + "learning_rate": 5e-05, + "loss": 0.0221, + "step": 488 + }, + { + "epoch": 1.1883353584447145, + "grad_norm": 0.5124850273132324, + "learning_rate": 5e-05, + "loss": 0.0306, + "step": 489 + }, + { + "epoch": 1.1907654921020656, + "grad_norm": 0.6515238881111145, + "learning_rate": 5e-05, + "loss": 0.0612, + "step": 490 + }, + { + "epoch": 1.1931956257594167, + "grad_norm": 0.816235363483429, + "learning_rate": 5e-05, + "loss": 0.1585, + "step": 491 + }, + { + "epoch": 1.1956257594167679, + "grad_norm": 0.6307040452957153, + "learning_rate": 5e-05, + "loss": 0.0551, + "step": 492 + }, + { + "epoch": 1.1980558930741192, + "grad_norm": 0.46716415882110596, + "learning_rate": 5e-05, + "loss": 0.0457, + "step": 493 + }, + { + "epoch": 
1.2004860267314703, + "grad_norm": 0.5497251749038696, + "learning_rate": 5e-05, + "loss": 0.0874, + "step": 494 + }, + { + "epoch": 1.2029161603888214, + "grad_norm": 1.0956586599349976, + "learning_rate": 5e-05, + "loss": 0.0796, + "step": 495 + }, + { + "epoch": 1.2053462940461726, + "grad_norm": 0.6908737421035767, + "learning_rate": 5e-05, + "loss": 0.0406, + "step": 496 + }, + { + "epoch": 1.2077764277035237, + "grad_norm": 0.8598071932792664, + "learning_rate": 5e-05, + "loss": 0.1157, + "step": 497 + }, + { + "epoch": 1.2102065613608748, + "grad_norm": 0.6752752065658569, + "learning_rate": 5e-05, + "loss": 0.068, + "step": 498 + }, + { + "epoch": 1.212636695018226, + "grad_norm": 0.5425324440002441, + "learning_rate": 5e-05, + "loss": 0.0976, + "step": 499 + }, + { + "epoch": 1.2150668286755772, + "grad_norm": 0.7367557883262634, + "learning_rate": 5e-05, + "loss": 0.1028, + "step": 500 + }, + { + "epoch": 1.2150668286755772, + "eval_loss": 0.23548956215381622, + "eval_runtime": 512.2983, + "eval_samples_per_second": 5.364, + "eval_steps_per_second": 0.671, + "step": 500 + }, + { + "epoch": 1.2174969623329284, + "grad_norm": 0.8351901769638062, + "learning_rate": 5e-05, + "loss": 0.0927, + "step": 501 + }, + { + "epoch": 1.2199270959902795, + "grad_norm": 0.5388638377189636, + "learning_rate": 5e-05, + "loss": 0.0956, + "step": 502 + }, + { + "epoch": 1.2223572296476306, + "grad_norm": 0.9194818139076233, + "learning_rate": 5e-05, + "loss": 0.0533, + "step": 503 + }, + { + "epoch": 1.2247873633049817, + "grad_norm": 0.7008382081985474, + "learning_rate": 5e-05, + "loss": 0.0554, + "step": 504 + }, + { + "epoch": 1.2272174969623328, + "grad_norm": 0.5537183284759521, + "learning_rate": 5e-05, + "loss": 0.1277, + "step": 505 + }, + { + "epoch": 1.229647630619684, + "grad_norm": 0.570819079875946, + "learning_rate": 5e-05, + "loss": 0.1037, + "step": 506 + }, + { + "epoch": 1.2320777642770353, + "grad_norm": 0.5867807269096375, + "learning_rate": 5e-05, + "loss": 0.0578, + "step": 507 + }, + { + "epoch": 1.2345078979343864, + "grad_norm": 0.7660566568374634, + "learning_rate": 5e-05, + "loss": 0.0856, + "step": 508 + }, + { + "epoch": 1.2369380315917375, + "grad_norm": 0.7493465542793274, + "learning_rate": 5e-05, + "loss": 0.0567, + "step": 509 + }, + { + "epoch": 1.2393681652490887, + "grad_norm": 0.6718665957450867, + "learning_rate": 5e-05, + "loss": 0.1113, + "step": 510 + }, + { + "epoch": 1.2417982989064398, + "grad_norm": 0.7018153667449951, + "learning_rate": 5e-05, + "loss": 0.0966, + "step": 511 + }, + { + "epoch": 1.2442284325637911, + "grad_norm": 1.3940482139587402, + "learning_rate": 5e-05, + "loss": 0.1352, + "step": 512 + }, + { + "epoch": 1.2466585662211422, + "grad_norm": 0.663314938545227, + "learning_rate": 5e-05, + "loss": 0.0556, + "step": 513 + }, + { + "epoch": 1.2490886998784934, + "grad_norm": 1.0090134143829346, + "learning_rate": 5e-05, + "loss": 0.0352, + "step": 514 + }, + { + "epoch": 1.2515188335358445, + "grad_norm": 0.5742064714431763, + "learning_rate": 5e-05, + "loss": 0.0669, + "step": 515 + }, + { + "epoch": 1.2539489671931956, + "grad_norm": 0.726700484752655, + "learning_rate": 5e-05, + "loss": 0.0533, + "step": 516 + }, + { + "epoch": 1.2563791008505467, + "grad_norm": 0.6788687109947205, + "learning_rate": 5e-05, + "loss": 0.0567, + "step": 517 + }, + { + "epoch": 1.2588092345078978, + "grad_norm": 0.48673132061958313, + "learning_rate": 5e-05, + "loss": 0.0406, + "step": 518 + }, + { + "epoch": 1.2612393681652492, + "grad_norm": 
1.1016216278076172, + "learning_rate": 5e-05, + "loss": 0.0668, + "step": 519 + }, + { + "epoch": 1.2636695018226003, + "grad_norm": 1.2345398664474487, + "learning_rate": 5e-05, + "loss": 0.029, + "step": 520 + }, + { + "epoch": 1.2660996354799514, + "grad_norm": 0.7507830858230591, + "learning_rate": 5e-05, + "loss": 0.0991, + "step": 521 + }, + { + "epoch": 1.2685297691373025, + "grad_norm": 0.819266140460968, + "learning_rate": 5e-05, + "loss": 0.0723, + "step": 522 + }, + { + "epoch": 1.2709599027946537, + "grad_norm": 0.9876227974891663, + "learning_rate": 5e-05, + "loss": 0.1555, + "step": 523 + }, + { + "epoch": 1.273390036452005, + "grad_norm": 0.7902403473854065, + "learning_rate": 5e-05, + "loss": 0.156, + "step": 524 + }, + { + "epoch": 1.2758201701093559, + "grad_norm": 0.6342129111289978, + "learning_rate": 5e-05, + "loss": 0.1161, + "step": 525 + }, + { + "epoch": 1.2782503037667072, + "grad_norm": 0.9039121866226196, + "learning_rate": 5e-05, + "loss": 0.1078, + "step": 526 + }, + { + "epoch": 1.2806804374240583, + "grad_norm": 0.9820278882980347, + "learning_rate": 5e-05, + "loss": 0.1182, + "step": 527 + }, + { + "epoch": 1.2831105710814095, + "grad_norm": 0.45999497175216675, + "learning_rate": 5e-05, + "loss": 0.0469, + "step": 528 + }, + { + "epoch": 1.2855407047387606, + "grad_norm": 0.6754646301269531, + "learning_rate": 5e-05, + "loss": 0.0875, + "step": 529 + }, + { + "epoch": 1.2879708383961117, + "grad_norm": 0.7348721027374268, + "learning_rate": 5e-05, + "loss": 0.0639, + "step": 530 + }, + { + "epoch": 1.290400972053463, + "grad_norm": 0.7348721027374268, + "learning_rate": 5e-05, + "loss": 0.0764, + "step": 531 + }, + { + "epoch": 1.2928311057108142, + "grad_norm": 0.4331410229206085, + "learning_rate": 5e-05, + "loss": 0.0278, + "step": 532 + }, + { + "epoch": 1.2952612393681653, + "grad_norm": 0.7986178994178772, + "learning_rate": 5e-05, + "loss": 0.0893, + "step": 533 + }, + { + "epoch": 1.2976913730255164, + "grad_norm": 0.7255426049232483, + "learning_rate": 5e-05, + "loss": 0.0802, + "step": 534 + }, + { + "epoch": 1.3001215066828675, + "grad_norm": 0.590578556060791, + "learning_rate": 5e-05, + "loss": 0.0853, + "step": 535 + }, + { + "epoch": 1.3025516403402186, + "grad_norm": 0.6147257685661316, + "learning_rate": 5e-05, + "loss": 0.0462, + "step": 536 + }, + { + "epoch": 1.3049817739975698, + "grad_norm": 0.6174589991569519, + "learning_rate": 5e-05, + "loss": 0.0758, + "step": 537 + }, + { + "epoch": 1.307411907654921, + "grad_norm": 0.5428439378738403, + "learning_rate": 5e-05, + "loss": 0.1452, + "step": 538 + }, + { + "epoch": 1.3098420413122722, + "grad_norm": 0.554800271987915, + "learning_rate": 5e-05, + "loss": 0.0266, + "step": 539 + }, + { + "epoch": 1.3122721749696233, + "grad_norm": 0.627275824546814, + "learning_rate": 5e-05, + "loss": 0.0604, + "step": 540 + }, + { + "epoch": 1.3147023086269745, + "grad_norm": 0.5268173813819885, + "learning_rate": 5e-05, + "loss": 0.036, + "step": 541 + }, + { + "epoch": 1.3171324422843256, + "grad_norm": 0.4525201916694641, + "learning_rate": 5e-05, + "loss": 0.0746, + "step": 542 + }, + { + "epoch": 1.319562575941677, + "grad_norm": 1.299209475517273, + "learning_rate": 5e-05, + "loss": 0.0573, + "step": 543 + }, + { + "epoch": 1.3219927095990278, + "grad_norm": 0.5899752378463745, + "learning_rate": 5e-05, + "loss": 0.0373, + "step": 544 + }, + { + "epoch": 1.3244228432563792, + "grad_norm": 0.5044320225715637, + "learning_rate": 5e-05, + "loss": 0.0415, + "step": 545 + }, + { + "epoch": 
1.3268529769137303, + "grad_norm": 0.4812248945236206, + "learning_rate": 5e-05, + "loss": 0.0898, + "step": 546 + }, + { + "epoch": 1.3292831105710814, + "grad_norm": 0.47461068630218506, + "learning_rate": 5e-05, + "loss": 0.0248, + "step": 547 + }, + { + "epoch": 1.3317132442284325, + "grad_norm": 0.5620236396789551, + "learning_rate": 5e-05, + "loss": 0.0418, + "step": 548 + }, + { + "epoch": 1.3341433778857836, + "grad_norm": 0.8658341765403748, + "learning_rate": 5e-05, + "loss": 0.0208, + "step": 549 + }, + { + "epoch": 1.336573511543135, + "grad_norm": 0.6640876531600952, + "learning_rate": 5e-05, + "loss": 0.0652, + "step": 550 + }, + { + "epoch": 1.339003645200486, + "grad_norm": 0.3873005509376526, + "learning_rate": 5e-05, + "loss": 0.018, + "step": 551 + }, + { + "epoch": 1.3414337788578372, + "grad_norm": 0.4546399414539337, + "learning_rate": 5e-05, + "loss": 0.0823, + "step": 552 + }, + { + "epoch": 1.3438639125151883, + "grad_norm": 0.5188842415809631, + "learning_rate": 5e-05, + "loss": 0.0604, + "step": 553 + }, + { + "epoch": 1.3462940461725394, + "grad_norm": 0.41030353307724, + "learning_rate": 5e-05, + "loss": 0.0112, + "step": 554 + }, + { + "epoch": 1.3487241798298906, + "grad_norm": 0.7154520750045776, + "learning_rate": 5e-05, + "loss": 0.0687, + "step": 555 + }, + { + "epoch": 1.3511543134872417, + "grad_norm": 0.6743410229682922, + "learning_rate": 5e-05, + "loss": 0.1262, + "step": 556 + }, + { + "epoch": 1.353584447144593, + "grad_norm": 0.38846057653427124, + "learning_rate": 5e-05, + "loss": 0.0371, + "step": 557 + }, + { + "epoch": 1.3560145808019441, + "grad_norm": 0.5022516846656799, + "learning_rate": 5e-05, + "loss": 0.0159, + "step": 558 + }, + { + "epoch": 1.3584447144592953, + "grad_norm": 0.5445972084999084, + "learning_rate": 5e-05, + "loss": 0.0652, + "step": 559 + }, + { + "epoch": 1.3608748481166464, + "grad_norm": 0.6346146464347839, + "learning_rate": 5e-05, + "loss": 0.1128, + "step": 560 + }, + { + "epoch": 1.3633049817739975, + "grad_norm": 0.7129392623901367, + "learning_rate": 5e-05, + "loss": 0.059, + "step": 561 + }, + { + "epoch": 1.3657351154313488, + "grad_norm": 0.4905102252960205, + "learning_rate": 5e-05, + "loss": 0.1114, + "step": 562 + }, + { + "epoch": 1.3681652490886997, + "grad_norm": 0.7290562391281128, + "learning_rate": 5e-05, + "loss": 0.1029, + "step": 563 + }, + { + "epoch": 1.370595382746051, + "grad_norm": 0.5576648116111755, + "learning_rate": 5e-05, + "loss": 0.0383, + "step": 564 + }, + { + "epoch": 1.3730255164034022, + "grad_norm": 0.595310389995575, + "learning_rate": 5e-05, + "loss": 0.0906, + "step": 565 + }, + { + "epoch": 1.3754556500607533, + "grad_norm": 0.7409107089042664, + "learning_rate": 5e-05, + "loss": 0.0239, + "step": 566 + }, + { + "epoch": 1.3778857837181044, + "grad_norm": 1.0220900774002075, + "learning_rate": 5e-05, + "loss": 0.0537, + "step": 567 + }, + { + "epoch": 1.3803159173754556, + "grad_norm": 1.0970425605773926, + "learning_rate": 5e-05, + "loss": 0.0615, + "step": 568 + }, + { + "epoch": 1.382746051032807, + "grad_norm": 0.45453736186027527, + "learning_rate": 5e-05, + "loss": 0.1, + "step": 569 + }, + { + "epoch": 1.385176184690158, + "grad_norm": 0.61972576379776, + "learning_rate": 5e-05, + "loss": 0.1029, + "step": 570 + }, + { + "epoch": 1.3876063183475091, + "grad_norm": 0.47734493017196655, + "learning_rate": 5e-05, + "loss": 0.0683, + "step": 571 + }, + { + "epoch": 1.3900364520048603, + "grad_norm": 0.3732990026473999, + "learning_rate": 5e-05, + "loss": 0.0706, + "step": 
572 + }, + { + "epoch": 1.3924665856622114, + "grad_norm": 0.49241572618484497, + "learning_rate": 5e-05, + "loss": 0.0667, + "step": 573 + }, + { + "epoch": 1.3948967193195625, + "grad_norm": 0.6907554268836975, + "learning_rate": 5e-05, + "loss": 0.0602, + "step": 574 + }, + { + "epoch": 1.3973268529769136, + "grad_norm": 0.8935093879699707, + "learning_rate": 5e-05, + "loss": 0.2114, + "step": 575 + }, + { + "epoch": 1.399756986634265, + "grad_norm": 0.2839941680431366, + "learning_rate": 5e-05, + "loss": 0.0257, + "step": 576 + }, + { + "epoch": 1.402187120291616, + "grad_norm": 0.7202839255332947, + "learning_rate": 5e-05, + "loss": 0.0878, + "step": 577 + }, + { + "epoch": 1.4046172539489672, + "grad_norm": 0.6520135402679443, + "learning_rate": 5e-05, + "loss": 0.0295, + "step": 578 + }, + { + "epoch": 1.4070473876063183, + "grad_norm": 0.7360886335372925, + "learning_rate": 5e-05, + "loss": 0.065, + "step": 579 + }, + { + "epoch": 1.4094775212636694, + "grad_norm": 0.5855164527893066, + "learning_rate": 5e-05, + "loss": 0.0752, + "step": 580 + }, + { + "epoch": 1.4119076549210208, + "grad_norm": 0.38474756479263306, + "learning_rate": 5e-05, + "loss": 0.0541, + "step": 581 + }, + { + "epoch": 1.414337788578372, + "grad_norm": 0.5105370283126831, + "learning_rate": 5e-05, + "loss": 0.0749, + "step": 582 + }, + { + "epoch": 1.416767922235723, + "grad_norm": 0.5076673030853271, + "learning_rate": 5e-05, + "loss": 0.0528, + "step": 583 + }, + { + "epoch": 1.4191980558930741, + "grad_norm": 0.5034826397895813, + "learning_rate": 5e-05, + "loss": 0.0505, + "step": 584 + }, + { + "epoch": 1.4216281895504252, + "grad_norm": 0.6378771066665649, + "learning_rate": 5e-05, + "loss": 0.0676, + "step": 585 + }, + { + "epoch": 1.4240583232077764, + "grad_norm": 0.5122814774513245, + "learning_rate": 5e-05, + "loss": 0.0515, + "step": 586 + }, + { + "epoch": 1.4264884568651275, + "grad_norm": 0.5807358026504517, + "learning_rate": 5e-05, + "loss": 0.0507, + "step": 587 + }, + { + "epoch": 1.4289185905224788, + "grad_norm": 0.5156981348991394, + "learning_rate": 5e-05, + "loss": 0.0503, + "step": 588 + }, + { + "epoch": 1.43134872417983, + "grad_norm": 0.9181635975837708, + "learning_rate": 5e-05, + "loss": 0.0964, + "step": 589 + }, + { + "epoch": 1.433778857837181, + "grad_norm": 0.38104763627052307, + "learning_rate": 5e-05, + "loss": 0.055, + "step": 590 + }, + { + "epoch": 1.4362089914945322, + "grad_norm": 0.4987882673740387, + "learning_rate": 5e-05, + "loss": 0.0347, + "step": 591 + }, + { + "epoch": 1.4386391251518833, + "grad_norm": 0.5201525688171387, + "learning_rate": 5e-05, + "loss": 0.0904, + "step": 592 + }, + { + "epoch": 1.4410692588092346, + "grad_norm": 0.5238099694252014, + "learning_rate": 5e-05, + "loss": 0.077, + "step": 593 + }, + { + "epoch": 1.4434993924665855, + "grad_norm": 0.924893319606781, + "learning_rate": 5e-05, + "loss": 0.1248, + "step": 594 + }, + { + "epoch": 1.4459295261239369, + "grad_norm": 0.6318195462226868, + "learning_rate": 5e-05, + "loss": 0.0267, + "step": 595 + }, + { + "epoch": 1.448359659781288, + "grad_norm": 0.9913945198059082, + "learning_rate": 5e-05, + "loss": 0.0917, + "step": 596 + }, + { + "epoch": 1.4507897934386391, + "grad_norm": 0.4909703731536865, + "learning_rate": 5e-05, + "loss": 0.036, + "step": 597 + }, + { + "epoch": 1.4532199270959902, + "grad_norm": 0.5913828015327454, + "learning_rate": 5e-05, + "loss": 0.0353, + "step": 598 + }, + { + "epoch": 1.4556500607533414, + "grad_norm": 0.3615020215511322, + "learning_rate": 5e-05, + 
"loss": 0.0517, + "step": 599 + }, + { + "epoch": 1.4580801944106927, + "grad_norm": 0.5106006264686584, + "learning_rate": 5e-05, + "loss": 0.0444, + "step": 600 + }, + { + "epoch": 1.4580801944106927, + "eval_loss": 0.22935771942138672, + "eval_runtime": 507.4275, + "eval_samples_per_second": 5.416, + "eval_steps_per_second": 0.678, + "step": 600 + }, + { + "epoch": 1.4605103280680438, + "grad_norm": 0.7134807705879211, + "learning_rate": 5e-05, + "loss": 0.056, + "step": 601 + }, + { + "epoch": 1.462940461725395, + "grad_norm": 0.2919784188270569, + "learning_rate": 5e-05, + "loss": 0.0146, + "step": 602 + }, + { + "epoch": 1.465370595382746, + "grad_norm": 0.6526296734809875, + "learning_rate": 5e-05, + "loss": 0.1207, + "step": 603 + }, + { + "epoch": 1.4678007290400972, + "grad_norm": 0.5833859443664551, + "learning_rate": 5e-05, + "loss": 0.0657, + "step": 604 + }, + { + "epoch": 1.4702308626974483, + "grad_norm": 0.6107626557350159, + "learning_rate": 5e-05, + "loss": 0.0655, + "step": 605 + }, + { + "epoch": 1.4726609963547994, + "grad_norm": 0.5227603912353516, + "learning_rate": 5e-05, + "loss": 0.0504, + "step": 606 + }, + { + "epoch": 1.4750911300121508, + "grad_norm": 0.6677300333976746, + "learning_rate": 5e-05, + "loss": 0.0465, + "step": 607 + }, + { + "epoch": 1.4775212636695019, + "grad_norm": 0.7898095846176147, + "learning_rate": 5e-05, + "loss": 0.0932, + "step": 608 + }, + { + "epoch": 1.479951397326853, + "grad_norm": 0.9366335868835449, + "learning_rate": 5e-05, + "loss": 0.0819, + "step": 609 + }, + { + "epoch": 1.482381530984204, + "grad_norm": 0.6056090593338013, + "learning_rate": 5e-05, + "loss": 0.0638, + "step": 610 + }, + { + "epoch": 1.4848116646415552, + "grad_norm": 0.7640931606292725, + "learning_rate": 5e-05, + "loss": 0.1152, + "step": 611 + }, + { + "epoch": 1.4872417982989066, + "grad_norm": 0.7496662735939026, + "learning_rate": 5e-05, + "loss": 0.0461, + "step": 612 + }, + { + "epoch": 1.4896719319562575, + "grad_norm": 0.6870647072792053, + "learning_rate": 5e-05, + "loss": 0.0812, + "step": 613 + }, + { + "epoch": 1.4921020656136088, + "grad_norm": 0.7589767575263977, + "learning_rate": 5e-05, + "loss": 0.0846, + "step": 614 + }, + { + "epoch": 1.49453219927096, + "grad_norm": 0.681777834892273, + "learning_rate": 5e-05, + "loss": 0.0407, + "step": 615 + }, + { + "epoch": 1.496962332928311, + "grad_norm": 0.7369627952575684, + "learning_rate": 5e-05, + "loss": 0.0955, + "step": 616 + }, + { + "epoch": 1.4993924665856622, + "grad_norm": 0.7186546921730042, + "learning_rate": 5e-05, + "loss": 0.0745, + "step": 617 + }, + { + "epoch": 1.5018226002430133, + "grad_norm": 0.6271875500679016, + "learning_rate": 5e-05, + "loss": 0.0533, + "step": 618 + }, + { + "epoch": 1.5042527339003646, + "grad_norm": 0.5378838777542114, + "learning_rate": 5e-05, + "loss": 0.0688, + "step": 619 + }, + { + "epoch": 1.5066828675577155, + "grad_norm": 0.7019214630126953, + "learning_rate": 5e-05, + "loss": 0.0369, + "step": 620 + }, + { + "epoch": 1.5091130012150669, + "grad_norm": 0.9618707299232483, + "learning_rate": 5e-05, + "loss": 0.0469, + "step": 621 + }, + { + "epoch": 1.511543134872418, + "grad_norm": 0.4820462167263031, + "learning_rate": 5e-05, + "loss": 0.074, + "step": 622 + }, + { + "epoch": 1.513973268529769, + "grad_norm": 0.411123126745224, + "learning_rate": 5e-05, + "loss": 0.0307, + "step": 623 + }, + { + "epoch": 1.5164034021871204, + "grad_norm": 0.5718781352043152, + "learning_rate": 5e-05, + "loss": 0.0276, + "step": 624 + }, + { + "epoch": 
1.5188335358444713, + "grad_norm": 0.4341799020767212, + "learning_rate": 5e-05, + "loss": 0.0651, + "step": 625 + }, + { + "epoch": 1.5212636695018227, + "grad_norm": 0.515855073928833, + "learning_rate": 5e-05, + "loss": 0.0413, + "step": 626 + }, + { + "epoch": 1.5236938031591738, + "grad_norm": 0.6174566149711609, + "learning_rate": 5e-05, + "loss": 0.0307, + "step": 627 + }, + { + "epoch": 1.526123936816525, + "grad_norm": 0.5931458473205566, + "learning_rate": 5e-05, + "loss": 0.0852, + "step": 628 + }, + { + "epoch": 1.528554070473876, + "grad_norm": 0.46777036786079407, + "learning_rate": 5e-05, + "loss": 0.0438, + "step": 629 + }, + { + "epoch": 1.5309842041312272, + "grad_norm": 0.4254392087459564, + "learning_rate": 5e-05, + "loss": 0.0606, + "step": 630 + }, + { + "epoch": 1.5334143377885785, + "grad_norm": 0.5670008063316345, + "learning_rate": 5e-05, + "loss": 0.0514, + "step": 631 + }, + { + "epoch": 1.5358444714459294, + "grad_norm": 1.1566509008407593, + "learning_rate": 5e-05, + "loss": 0.1133, + "step": 632 + }, + { + "epoch": 1.5382746051032807, + "grad_norm": 0.5660587549209595, + "learning_rate": 5e-05, + "loss": 0.0544, + "step": 633 + }, + { + "epoch": 1.5407047387606319, + "grad_norm": 0.5406026840209961, + "learning_rate": 5e-05, + "loss": 0.046, + "step": 634 + }, + { + "epoch": 1.543134872417983, + "grad_norm": 0.49723613262176514, + "learning_rate": 5e-05, + "loss": 0.0177, + "step": 635 + }, + { + "epoch": 1.545565006075334, + "grad_norm": 1.263848900794983, + "learning_rate": 5e-05, + "loss": 0.0659, + "step": 636 + }, + { + "epoch": 1.5479951397326852, + "grad_norm": 0.6971784830093384, + "learning_rate": 5e-05, + "loss": 0.0747, + "step": 637 + }, + { + "epoch": 1.5504252733900366, + "grad_norm": 0.5171895623207092, + "learning_rate": 5e-05, + "loss": 0.0512, + "step": 638 + }, + { + "epoch": 1.5528554070473874, + "grad_norm": 0.7318109273910522, + "learning_rate": 5e-05, + "loss": 0.0655, + "step": 639 + }, + { + "epoch": 1.5552855407047388, + "grad_norm": 0.46023720502853394, + "learning_rate": 5e-05, + "loss": 0.0568, + "step": 640 + }, + { + "epoch": 1.55771567436209, + "grad_norm": 0.7200696468353271, + "learning_rate": 5e-05, + "loss": 0.0511, + "step": 641 + }, + { + "epoch": 1.560145808019441, + "grad_norm": 1.0338624715805054, + "learning_rate": 5e-05, + "loss": 0.0822, + "step": 642 + }, + { + "epoch": 1.5625759416767924, + "grad_norm": 0.7976377010345459, + "learning_rate": 5e-05, + "loss": 0.0623, + "step": 643 + }, + { + "epoch": 1.5650060753341433, + "grad_norm": 0.6757969260215759, + "learning_rate": 5e-05, + "loss": 0.0737, + "step": 644 + }, + { + "epoch": 1.5674362089914946, + "grad_norm": 1.4083917140960693, + "learning_rate": 5e-05, + "loss": 0.0374, + "step": 645 + }, + { + "epoch": 1.5698663426488457, + "grad_norm": 0.348634272813797, + "learning_rate": 5e-05, + "loss": 0.0249, + "step": 646 + }, + { + "epoch": 1.5722964763061968, + "grad_norm": 0.7224584817886353, + "learning_rate": 5e-05, + "loss": 0.1013, + "step": 647 + }, + { + "epoch": 1.574726609963548, + "grad_norm": 0.919998288154602, + "learning_rate": 5e-05, + "loss": 0.1093, + "step": 648 + }, + { + "epoch": 1.577156743620899, + "grad_norm": 0.7341464161872864, + "learning_rate": 5e-05, + "loss": 0.1234, + "step": 649 + }, + { + "epoch": 1.5795868772782504, + "grad_norm": 0.8548154234886169, + "learning_rate": 5e-05, + "loss": 0.1628, + "step": 650 + }, + { + "epoch": 1.5820170109356013, + "grad_norm": 0.7931060194969177, + "learning_rate": 5e-05, + "loss": 0.1439, + 
"step": 651 + }, + { + "epoch": 1.5844471445929527, + "grad_norm": 0.9627839922904968, + "learning_rate": 5e-05, + "loss": 0.0327, + "step": 652 + }, + { + "epoch": 1.5868772782503038, + "grad_norm": 0.8406124114990234, + "learning_rate": 5e-05, + "loss": 0.0574, + "step": 653 + }, + { + "epoch": 1.589307411907655, + "grad_norm": 0.7667601108551025, + "learning_rate": 5e-05, + "loss": 0.0551, + "step": 654 + }, + { + "epoch": 1.5917375455650062, + "grad_norm": 0.4747026264667511, + "learning_rate": 5e-05, + "loss": 0.052, + "step": 655 + }, + { + "epoch": 1.5941676792223571, + "grad_norm": 0.632870078086853, + "learning_rate": 5e-05, + "loss": 0.0587, + "step": 656 + }, + { + "epoch": 1.5965978128797085, + "grad_norm": 0.7700726985931396, + "learning_rate": 5e-05, + "loss": 0.0986, + "step": 657 + }, + { + "epoch": 1.5990279465370594, + "grad_norm": 1.121201992034912, + "learning_rate": 5e-05, + "loss": 0.0318, + "step": 658 + }, + { + "epoch": 1.6014580801944107, + "grad_norm": 0.6213607788085938, + "learning_rate": 5e-05, + "loss": 0.0932, + "step": 659 + }, + { + "epoch": 1.6038882138517618, + "grad_norm": 0.30946460366249084, + "learning_rate": 5e-05, + "loss": 0.0333, + "step": 660 + }, + { + "epoch": 1.606318347509113, + "grad_norm": 0.5171956419944763, + "learning_rate": 5e-05, + "loss": 0.0617, + "step": 661 + }, + { + "epoch": 1.6087484811664643, + "grad_norm": 0.6288155317306519, + "learning_rate": 5e-05, + "loss": 0.0506, + "step": 662 + }, + { + "epoch": 1.6111786148238152, + "grad_norm": 0.6943643689155579, + "learning_rate": 5e-05, + "loss": 0.0285, + "step": 663 + }, + { + "epoch": 1.6136087484811665, + "grad_norm": 0.8144752383232117, + "learning_rate": 5e-05, + "loss": 0.0484, + "step": 664 + }, + { + "epoch": 1.6160388821385177, + "grad_norm": 0.4773041307926178, + "learning_rate": 5e-05, + "loss": 0.0993, + "step": 665 + }, + { + "epoch": 1.6184690157958688, + "grad_norm": 0.7703022956848145, + "learning_rate": 5e-05, + "loss": 0.0735, + "step": 666 + }, + { + "epoch": 1.62089914945322, + "grad_norm": 0.6951743960380554, + "learning_rate": 5e-05, + "loss": 0.0446, + "step": 667 + }, + { + "epoch": 1.623329283110571, + "grad_norm": 0.4781995415687561, + "learning_rate": 5e-05, + "loss": 0.0178, + "step": 668 + }, + { + "epoch": 1.6257594167679223, + "grad_norm": 0.5066447854042053, + "learning_rate": 5e-05, + "loss": 0.0969, + "step": 669 + }, + { + "epoch": 1.6281895504252732, + "grad_norm": 0.4288381040096283, + "learning_rate": 5e-05, + "loss": 0.0317, + "step": 670 + }, + { + "epoch": 1.6306196840826246, + "grad_norm": 0.5544601678848267, + "learning_rate": 5e-05, + "loss": 0.035, + "step": 671 + }, + { + "epoch": 1.6330498177399757, + "grad_norm": 0.48424339294433594, + "learning_rate": 5e-05, + "loss": 0.0231, + "step": 672 + }, + { + "epoch": 1.6354799513973268, + "grad_norm": 0.770346462726593, + "learning_rate": 5e-05, + "loss": 0.0767, + "step": 673 + }, + { + "epoch": 1.6379100850546782, + "grad_norm": 0.7101414203643799, + "learning_rate": 5e-05, + "loss": 0.0247, + "step": 674 + }, + { + "epoch": 1.640340218712029, + "grad_norm": 0.9238234758377075, + "learning_rate": 5e-05, + "loss": 0.0528, + "step": 675 + }, + { + "epoch": 1.6427703523693804, + "grad_norm": 0.7192332148551941, + "learning_rate": 5e-05, + "loss": 0.0738, + "step": 676 + }, + { + "epoch": 1.6452004860267315, + "grad_norm": 0.5393381118774414, + "learning_rate": 5e-05, + "loss": 0.0337, + "step": 677 + }, + { + "epoch": 1.6476306196840826, + "grad_norm": 0.2713688015937805, + "learning_rate": 
5e-05, + "loss": 0.0159, + "step": 678 + }, + { + "epoch": 1.6500607533414338, + "grad_norm": 0.3296976089477539, + "learning_rate": 5e-05, + "loss": 0.0608, + "step": 679 + }, + { + "epoch": 1.6524908869987849, + "grad_norm": 0.4995911717414856, + "learning_rate": 5e-05, + "loss": 0.0119, + "step": 680 + }, + { + "epoch": 1.6549210206561362, + "grad_norm": 0.5983487367630005, + "learning_rate": 5e-05, + "loss": 0.0188, + "step": 681 + }, + { + "epoch": 1.6573511543134871, + "grad_norm": 0.4184721112251282, + "learning_rate": 5e-05, + "loss": 0.0207, + "step": 682 + }, + { + "epoch": 1.6597812879708385, + "grad_norm": 0.6056463122367859, + "learning_rate": 5e-05, + "loss": 0.0321, + "step": 683 + }, + { + "epoch": 1.6622114216281896, + "grad_norm": 0.8600037693977356, + "learning_rate": 5e-05, + "loss": 0.0716, + "step": 684 + }, + { + "epoch": 1.6646415552855407, + "grad_norm": 0.7863733768463135, + "learning_rate": 5e-05, + "loss": 0.1251, + "step": 685 + }, + { + "epoch": 1.6670716889428918, + "grad_norm": 0.4304426908493042, + "learning_rate": 5e-05, + "loss": 0.0824, + "step": 686 + }, + { + "epoch": 1.669501822600243, + "grad_norm": 0.6984259486198425, + "learning_rate": 5e-05, + "loss": 0.0332, + "step": 687 + }, + { + "epoch": 1.6719319562575943, + "grad_norm": 0.400698184967041, + "learning_rate": 5e-05, + "loss": 0.0444, + "step": 688 + }, + { + "epoch": 1.6743620899149452, + "grad_norm": 0.5730535387992859, + "learning_rate": 5e-05, + "loss": 0.0488, + "step": 689 + }, + { + "epoch": 1.6767922235722965, + "grad_norm": 0.4522067606449127, + "learning_rate": 5e-05, + "loss": 0.0596, + "step": 690 + }, + { + "epoch": 1.6792223572296476, + "grad_norm": 0.44992417097091675, + "learning_rate": 5e-05, + "loss": 0.0311, + "step": 691 + }, + { + "epoch": 1.6816524908869988, + "grad_norm": 0.8305267095565796, + "learning_rate": 5e-05, + "loss": 0.0933, + "step": 692 + }, + { + "epoch": 1.68408262454435, + "grad_norm": 0.7188434600830078, + "learning_rate": 5e-05, + "loss": 0.0443, + "step": 693 + }, + { + "epoch": 1.686512758201701, + "grad_norm": 0.5052474141120911, + "learning_rate": 5e-05, + "loss": 0.1067, + "step": 694 + }, + { + "epoch": 1.6889428918590523, + "grad_norm": 0.686128556728363, + "learning_rate": 5e-05, + "loss": 0.055, + "step": 695 + }, + { + "epoch": 1.6913730255164034, + "grad_norm": 0.4116796851158142, + "learning_rate": 5e-05, + "loss": 0.0728, + "step": 696 + }, + { + "epoch": 1.6938031591737546, + "grad_norm": 0.44450122117996216, + "learning_rate": 5e-05, + "loss": 0.0359, + "step": 697 + }, + { + "epoch": 1.6962332928311057, + "grad_norm": 0.3920440375804901, + "learning_rate": 5e-05, + "loss": 0.0654, + "step": 698 + }, + { + "epoch": 1.6986634264884568, + "grad_norm": 0.7238947749137878, + "learning_rate": 5e-05, + "loss": 0.0593, + "step": 699 + }, + { + "epoch": 1.7010935601458081, + "grad_norm": 0.3644774854183197, + "learning_rate": 5e-05, + "loss": 0.054, + "step": 700 + }, + { + "epoch": 1.7010935601458081, + "eval_loss": 0.23474913835525513, + "eval_runtime": 505.0402, + "eval_samples_per_second": 5.441, + "eval_steps_per_second": 0.681, + "step": 700 + }, + { + "epoch": 1.7010935601458081, + "step": 700, + "total_flos": 2.1333404857820774e+18, + "train_loss": 0.12394342508699213, + "train_runtime": 14191.792, + "train_samples_per_second": 2.255, + "train_steps_per_second": 0.07 + } + ], + "logging_steps": 1.0, + "max_steps": 1000, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 100, + "total_flos": 2.1333404857820774e+18, + 
"train_batch_size": 2, + "trial_name": null, + "trial_params": null +}
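
Not part of the logged state above: a minimal Python sketch (an assumption, not anything shipped with this run) of how a trainer_state.json like this one can be post-processed to pull out the training and eval loss curves. The file path is a placeholder; the record fields used ("step", "loss", "eval_loss") match the entries logged above, and "log_history", "best_metric", and "best_model_checkpoint" are the standard Trainer state keys.

    # Sketch only: summarize a Trainer state file. Path below is a placeholder.
    import json

    with open("trainer_state.json") as f:  # placeholder path, adjust to your checkpoint dir
        state = json.load(f)

    # log_history mixes per-step training records ("loss") and periodic eval records ("eval_loss");
    # the final summary record carries neither key, so it is excluded from both lists.
    train = [(rec["step"], rec["loss"]) for rec in state["log_history"] if "loss" in rec]
    evals = [(rec["step"], rec["eval_loss"]) for rec in state["log_history"] if "eval_loss" in rec]

    print(f"best metric: {state.get('best_metric')} at {state.get('best_model_checkpoint')}")
    for step, loss in evals:
        print(f"step {step:>4}  eval_loss {loss:.4f}")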